Posted to commits@hbase.apache.org by zg...@apache.org on 2017/05/18 09:51:05 UTC

[1/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Repository: hbase
Updated Branches:
  refs/heads/master 32d2062b5 -> 37dd8ff72


http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
index f8aa5ca..0ee28d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -33,8 +33,8 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-shell/src/main/ruby/hbase/admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 920cdf8..749b2e8 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -972,14 +972,16 @@ module Hbase
 
     #----------------------------------------------------------------------------------------------
     # Restore specified snapshot
-    def restore_snapshot(snapshot_name)
-      @admin.restoreSnapshot(snapshot_name)
+    def restore_snapshot(snapshot_name, restore_acl = false)
+      conf = @connection.getConfiguration
+      take_fail_safe_snapshot = conf.getBoolean("hbase.snapshot.restore.take.failsafe.snapshot", false)
+      @admin.restoreSnapshot(snapshot_name, take_fail_safe_snapshot, restore_acl)
     end
 
     #----------------------------------------------------------------------------------------------
     # Create a new table by cloning the snapshot content
-    def clone_snapshot(snapshot_name, table)
-      @admin.cloneSnapshot(snapshot_name, TableName.valueOf(table))
+    def clone_snapshot(snapshot_name, table, restore_acl = false)
+      @admin.cloneSnapshot(snapshot_name, TableName.valueOf(table), restore_acl)
     end
 
     #----------------------------------------------------------------------------------------------

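For reference, the Ruby wrappers above delegate to boolean overloads on the Java
Admin interface. A minimal client sketch, assuming an open cluster connection
(snapshot and table names are placeholders):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class SnapshotAclExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      // Same config key the Ruby wrapper consults before restoring.
      boolean takeFailSafe =
          conf.getBoolean("hbase.snapshot.restore.take.failsafe.snapshot", false);
      try (Connection connection = ConnectionFactory.createConnection(conf);
           Admin admin = connection.getAdmin()) {
        // Restore the snapshot and re-apply the ACLs captured with it;
        // the target table must be disabled first.
        admin.restoreSnapshot("snapshotName", takeFailSafe, true);
        // Clone into a new table, carrying the retained permissions over.
        admin.cloneSnapshot("snapshotName", TableName.valueOf("ns:tableName"), true);
      }
    }
  }
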
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-shell/src/main/ruby/hbase_constants.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb b/hbase-shell/src/main/ruby/hbase_constants.rb
index 55ae9e7..52819c0 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -83,6 +83,7 @@ module HBaseConstants
   DATA = 'DATA'
   SERVER_NAME = 'SERVER_NAME'
   LOCALITY_THRESHOLD = 'LOCALITY_THRESHOLD'
+  RESTORE_ACL = 'RESTORE_ACL'
 
   # Load constants from hbase java API
   def self.promote_constants(constants)

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
index c57e87f..f95c652 100644
--- a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
@@ -28,11 +28,18 @@ And writing on the newly created table will not influence the snapshot data.
 Examples:
   hbase> clone_snapshot 'snapshotName', 'tableName'
   hbase> clone_snapshot 'snapshotName', 'namespace:tableName'
+
+The following command will restore all ACLs from the original snapshot table
+into the newly created table.
+
+  hbase> clone_snapshot 'snapshotName', 'namespace:tableName', {RESTORE_ACL=>true}
 EOF
       end
 
-      def command(snapshot_name, table)
-        admin.clone_snapshot(snapshot_name, table)
+      def command(snapshot_name, table, args = {})
+        raise(ArgumentError, "Arguments should be a Hash") unless args.kind_of?(Hash)
+        restore_acl = args.delete(RESTORE_ACL) || false
+        admin.clone_snapshot(snapshot_name, table, restore_acl)
       end
 
       def handle_exceptions(cause, *args)

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb b/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb
index 2471e1b..85a30a1 100644
--- a/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/restore_snapshot.rb
@@ -28,11 +28,17 @@ The table must be disabled.
 
 Examples:
   hbase> restore_snapshot 'snapshotName'
+
+The following command will restore all ACLs from the snapshot table into the
+restored table.
+
+  hbase> restore_snapshot 'snapshotName', {RESTORE_ACL=>true}
 EOF
       end
 
-      def command(snapshot_name)
-        admin.restore_snapshot(snapshot_name)
+      def command(snapshot_name, args = {})
+        raise(ArgumentError, "Arguments should be a Hash") unless args.kind_of?(Hash)
+        restore_acl = args.delete(RESTORE_ACL) || false
+        admin.restore_snapshot(snapshot_name, restore_acl)
       end
     end
   end

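One way to confirm that a restore or clone with RESTORE_ACL carried the grants
over is to list the permissions on the resulting table. A sketch using
AccessControlClient (the table-name regex below is a placeholder):

  import java.util.List;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.security.access.AccessControlClient;
  import org.apache.hadoop.hbase.security.access.UserPermission;

  public class VerifyRestoredAcl {
    public static void main(String[] args) throws Throwable {
      try (Connection conn =
          ConnectionFactory.createConnection(HBaseConfiguration.create())) {
        // One entry per grant on tables matching the regex.
        List<UserPermission> perms =
            AccessControlClient.getUserPermissions(conn, "tableName");
        for (UserPermission p : perms) {
          System.out.println(p);
        }
      }
    }
  }
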
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
----------------------------------------------------------------------
diff --git a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
index 04fbc7a..4279d89 100644
--- a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
+++ b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.jruby.embed.PathType;
 import org.junit.Test;
-import org.junit.Ignore;
 import org.junit.experimental.categories.Category;
 
 @Category({ ClientTests.class, LargeTests.class })


[7/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
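The generated AccessControlProtos.java added below supplies the builders used to
serialize table ACLs alongside the snapshot. A minimal sketch of assembling a
Permission message from these builders; the addAction calls assume the repeated
Permission.Action field that AccessControl.proto declares on GlobalPermission
(the .proto source is not shown here):

  import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
  import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission;

  public class BuildPermissionExample {
    public static void main(String[] args) {
      // A global grant of READ and WRITE; addAction is assumed from the
      // repeated action field in AccessControl.proto.
      Permission perm = Permission.newBuilder()
          .setType(Permission.Type.Global)
          .setGlobalPermission(
              AccessControlProtos.GlobalPermission.newBuilder()
                  .addAction(Permission.Action.READ)
                  .addAction(Permission.Action.WRITE))
          .build();
      System.out.println(perm);
    }
  }
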
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
new file mode 100644
index 0000000..06a4e01
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
@@ -0,0 +1,11171 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: AccessControl.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class AccessControlProtos {
+  private AccessControlProtos() {}
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  public interface PermissionOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.Permission)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>required .hbase.pb.Permission.Type type = 1;</code>
+     */
+    boolean hasType();
+    /**
+     * <code>required .hbase.pb.Permission.Type type = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type getType();
+
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    boolean hasGlobalPermission();
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission getGlobalPermission();
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder getGlobalPermissionOrBuilder();
+
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    boolean hasNamespacePermission();
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission getNamespacePermission();
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder getNamespacePermissionOrBuilder();
+
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    boolean hasTablePermission();
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission getTablePermission();
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder getTablePermissionOrBuilder();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.Permission}
+   */
+  public  static final class Permission extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.Permission)
+      PermissionOrBuilder {
+    // Use Permission.newBuilder() to construct.
+    private Permission(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private Permission() {
+      type_ = 1;
+    }
+
+    @java.lang.Override
+    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Permission(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      int mutable_bitField0_ = 0;
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(1, rawValue);
+              } else {
+                bitField0_ |= 0x00000001;
+                type_ = rawValue;
+              }
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000002) == 0x00000002)) {
+                subBuilder = globalPermission_.toBuilder();
+              }
+              globalPermission_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(globalPermission_);
+                globalPermission_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000002;
+              break;
+            }
+            case 26: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000004) == 0x00000004)) {
+                subBuilder = namespacePermission_.toBuilder();
+              }
+              namespacePermission_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(namespacePermission_);
+                namespacePermission_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000004;
+              break;
+            }
+            case 34: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000008) == 0x00000008)) {
+                subBuilder = tablePermission_.toBuilder();
+              }
+              tablePermission_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(tablePermission_);
+                tablePermission_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000008;
+              break;
+            }
+          }
+        }
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_Permission_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_Permission_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder.class);
+    }
+
+    /**
+     * Protobuf enum {@code hbase.pb.Permission.Action}
+     */
+    public enum Action
+        implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * <code>READ = 0;</code>
+       */
+      READ(0),
+      /**
+       * <code>WRITE = 1;</code>
+       */
+      WRITE(1),
+      /**
+       * <code>EXEC = 2;</code>
+       */
+      EXEC(2),
+      /**
+       * <code>CREATE = 3;</code>
+       */
+      CREATE(3),
+      /**
+       * <code>ADMIN = 4;</code>
+       */
+      ADMIN(4),
+      ;
+
+      /**
+       * <code>READ = 0;</code>
+       */
+      public static final int READ_VALUE = 0;
+      /**
+       * <code>WRITE = 1;</code>
+       */
+      public static final int WRITE_VALUE = 1;
+      /**
+       * <code>EXEC = 2;</code>
+       */
+      public static final int EXEC_VALUE = 2;
+      /**
+       * <code>CREATE = 3;</code>
+       */
+      public static final int CREATE_VALUE = 3;
+      /**
+       * <code>ADMIN = 4;</code>
+       */
+      public static final int ADMIN_VALUE = 4;
+
+
+      public final int getNumber() {
+        return value;
+      }
+
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static Action valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static Action forNumber(int value) {
+        switch (value) {
+          case 0: return READ;
+          case 1: return WRITE;
+          case 2: return EXEC;
+          case 3: return CREATE;
+          case 4: return ADMIN;
+          default: return null;
+        }
+      }
+
+      public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Action>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+          Action> internalValueMap =
+            new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Action>() {
+              public Action findValueByNumber(int number) {
+                return Action.forNumber(number);
+              }
+            };
+
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(ordinal());
+      }
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(0);
+      }
+
+      private static final Action[] VALUES = values();
+
+      public static Action valueOf(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int value;
+
+      private Action(int value) {
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:hbase.pb.Permission.Action)
+    }
+
+    /**
+     * Protobuf enum {@code hbase.pb.Permission.Type}
+     */
+    public enum Type
+        implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * <code>Global = 1;</code>
+       */
+      Global(1),
+      /**
+       * <code>Namespace = 2;</code>
+       */
+      Namespace(2),
+      /**
+       * <code>Table = 3;</code>
+       */
+      Table(3),
+      ;
+
+      /**
+       * <code>Global = 1;</code>
+       */
+      public static final int Global_VALUE = 1;
+      /**
+       * <code>Namespace = 2;</code>
+       */
+      public static final int Namespace_VALUE = 2;
+      /**
+       * <code>Table = 3;</code>
+       */
+      public static final int Table_VALUE = 3;
+
+
+      public final int getNumber() {
+        return value;
+      }
+
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static Type valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static Type forNumber(int value) {
+        switch (value) {
+          case 1: return Global;
+          case 2: return Namespace;
+          case 3: return Table;
+          default: return null;
+        }
+      }
+
+      public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+          Type> internalValueMap =
+            new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>() {
+              public Type findValueByNumber(int number) {
+                return Type.forNumber(number);
+              }
+            };
+
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(ordinal());
+      }
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.getDescriptor().getEnumTypes().get(1);
+      }
+
+      private static final Type[] VALUES = values();
+
+      public static Type valueOf(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int value;
+
+      private Type(int value) {
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:hbase.pb.Permission.Type)
+    }
+
+    private int bitField0_;
+    public static final int TYPE_FIELD_NUMBER = 1;
+    private int type_;
+    /**
+     * <code>required .hbase.pb.Permission.Type type = 1;</code>
+     */
+    public boolean hasType() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required .hbase.pb.Permission.Type type = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type getType() {
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.valueOf(type_);
+      return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.Global : result;
+    }
+
+    public static final int GLOBAL_PERMISSION_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission globalPermission_;
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    public boolean hasGlobalPermission() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission getGlobalPermission() {
+      return globalPermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.getDefaultInstance() : globalPermission_;
+    }
+    /**
+     * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder getGlobalPermissionOrBuilder() {
+      return globalPermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.getDefaultInstance() : globalPermission_;
+    }
+
+    public static final int NAMESPACE_PERMISSION_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission namespacePermission_;
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    public boolean hasNamespacePermission() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission getNamespacePermission() {
+      return namespacePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.getDefaultInstance() : namespacePermission_;
+    }
+    /**
+     * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder getNamespacePermissionOrBuilder() {
+      return namespacePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.getDefaultInstance() : namespacePermission_;
+    }
+
+    public static final int TABLE_PERMISSION_FIELD_NUMBER = 4;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission tablePermission_;
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    public boolean hasTablePermission() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission getTablePermission() {
+      return tablePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance() : tablePermission_;
+    }
+    /**
+     * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder getTablePermissionOrBuilder() {
+      return tablePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance() : tablePermission_;
+    }
+
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      if (!hasType()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (hasTablePermission()) {
+        if (!getTablePermission().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeEnum(1, type_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeMessage(2, getGlobalPermission());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeMessage(3, getNamespacePermission());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeMessage(4, getTablePermission());
+      }
+      unknownFields.writeTo(output);
+    }
+
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeEnumSize(1, type_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(2, getGlobalPermission());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(3, getNamespacePermission());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, getTablePermission());
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission) obj;
+
+      boolean result = true;
+      result = result && (hasType() == other.hasType());
+      if (hasType()) {
+        result = result && type_ == other.type_;
+      }
+      result = result && (hasGlobalPermission() == other.hasGlobalPermission());
+      if (hasGlobalPermission()) {
+        result = result && getGlobalPermission()
+            .equals(other.getGlobalPermission());
+      }
+      result = result && (hasNamespacePermission() == other.hasNamespacePermission());
+      if (hasNamespacePermission()) {
+        result = result && getNamespacePermission()
+            .equals(other.getNamespacePermission());
+      }
+      result = result && (hasTablePermission() == other.hasTablePermission());
+      if (hasTablePermission()) {
+        result = result && getTablePermission()
+            .equals(other.getTablePermission());
+      }
+      result = result && unknownFields.equals(other.unknownFields);
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasType()) {
+        hash = (37 * hash) + TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + type_;
+      }
+      if (hasGlobalPermission()) {
+        hash = (37 * hash) + GLOBAL_PERMISSION_FIELD_NUMBER;
+        hash = (53 * hash) + getGlobalPermission().hashCode();
+      }
+      if (hasNamespacePermission()) {
+        hash = (37 * hash) + NAMESPACE_PERMISSION_FIELD_NUMBER;
+        hash = (53 * hash) + getNamespacePermission().hashCode();
+      }
+      if (hasTablePermission()) {
+        hash = (37 * hash) + TABLE_PERMISSION_FIELD_NUMBER;
+        hash = (53 * hash) + getTablePermission().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(byte[] data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        byte[] data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseDelimitedFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.Permission}
+     */
+    public static final class Builder extends
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.Permission)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.PermissionOrBuilder {
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_Permission_descriptor;
+      }
+
+      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_Permission_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+          getGlobalPermissionFieldBuilder();
+          getNamespacePermissionFieldBuilder();
+          getTablePermissionFieldBuilder();
+        }
+      }
+      public Builder clear() {
+        super.clear();
+        type_ = 1;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        if (globalPermissionBuilder_ == null) {
+          globalPermission_ = null;
+        } else {
+          globalPermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000002);
+        if (namespacePermissionBuilder_ == null) {
+          namespacePermission_ = null;
+        } else {
+          namespacePermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000004);
+        if (tablePermissionBuilder_ == null) {
+          tablePermission_ = null;
+        } else {
+          tablePermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000008);
+        return this;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_Permission_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission build() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission buildPartial() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission result = new org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.type_ = type_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        if (globalPermissionBuilder_ == null) {
+          result.globalPermission_ = globalPermission_;
+        } else {
+          result.globalPermission_ = globalPermissionBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        if (namespacePermissionBuilder_ == null) {
+          result.namespacePermission_ = namespacePermission_;
+        } else {
+          result.namespacePermission_ = namespacePermissionBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        if (tablePermissionBuilder_ == null) {
+          result.tablePermission_ = tablePermission_;
+        } else {
+          result.tablePermission_ = tablePermissionBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission other) {
+        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.getDefaultInstance()) return this;
+        if (other.hasType()) {
+          setType(other.getType());
+        }
+        if (other.hasGlobalPermission()) {
+          mergeGlobalPermission(other.getGlobalPermission());
+        }
+        if (other.hasNamespacePermission()) {
+          mergeNamespacePermission(other.getNamespacePermission());
+        }
+        if (other.hasTablePermission()) {
+          mergeTablePermission(other.getTablePermission());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasType()) {
+          return false;
+        }
+        if (hasTablePermission()) {
+          if (!getTablePermission().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      private int type_ = 1;
+      /**
+       * <code>required .hbase.pb.Permission.Type type = 1;</code>
+       */
+      public boolean hasType() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required .hbase.pb.Permission.Type type = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type getType() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.valueOf(type_);
+        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.Global : result;
+      }
+      /**
+       * <code>required .hbase.pb.Permission.Type type = 1;</code>
+       */
+      public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        type_ = value.getNumber();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required .hbase.pb.Permission.Type type = 1;</code>
+       */
+      public Builder clearType() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        type_ = 1;
+        onChanged();
+        return this;
+      }
+
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission globalPermission_ = null;
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder> globalPermissionBuilder_;
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public boolean hasGlobalPermission() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission getGlobalPermission() {
+        if (globalPermissionBuilder_ == null) {
+          return globalPermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.getDefaultInstance() : globalPermission_;
+        } else {
+          return globalPermissionBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public Builder setGlobalPermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission value) {
+        if (globalPermissionBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          globalPermission_ = value;
+          onChanged();
+        } else {
+          globalPermissionBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public Builder setGlobalPermission(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder builderForValue) {
+        if (globalPermissionBuilder_ == null) {
+          globalPermission_ = builderForValue.build();
+          onChanged();
+        } else {
+          globalPermissionBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public Builder mergeGlobalPermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission value) {
+        if (globalPermissionBuilder_ == null) {
+          if (((bitField0_ & 0x00000002) == 0x00000002) &&
+              globalPermission_ != null &&
+              globalPermission_ != org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.getDefaultInstance()) {
+            globalPermission_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.newBuilder(globalPermission_).mergeFrom(value).buildPartial();
+          } else {
+            globalPermission_ = value;
+          }
+          onChanged();
+        } else {
+          globalPermissionBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000002;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public Builder clearGlobalPermission() {
+        if (globalPermissionBuilder_ == null) {
+          globalPermission_ = null;
+          onChanged();
+        } else {
+          globalPermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder getGlobalPermissionBuilder() {
+        bitField0_ |= 0x00000002;
+        onChanged();
+        return getGlobalPermissionFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder getGlobalPermissionOrBuilder() {
+        if (globalPermissionBuilder_ != null) {
+          return globalPermissionBuilder_.getMessageOrBuilder();
+        } else {
+          return globalPermission_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.getDefaultInstance() : globalPermission_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.GlobalPermission global_permission = 2;</code>
+       */
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>
+          getGlobalPermissionFieldBuilder() {
+        if (globalPermissionBuilder_ == null) {
+          globalPermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>(
+                  getGlobalPermission(),
+                  getParentForChildren(),
+                  isClean());
+          globalPermission_ = null;
+        }
+        return globalPermissionBuilder_;
+      }
+
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission namespacePermission_ = null;
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder> namespacePermissionBuilder_;
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public boolean hasNamespacePermission() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission getNamespacePermission() {
+        if (namespacePermissionBuilder_ == null) {
+          return namespacePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.getDefaultInstance() : namespacePermission_;
+        } else {
+          return namespacePermissionBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public Builder setNamespacePermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission value) {
+        if (namespacePermissionBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          namespacePermission_ = value;
+          onChanged();
+        } else {
+          namespacePermissionBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000004;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public Builder setNamespacePermission(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder builderForValue) {
+        if (namespacePermissionBuilder_ == null) {
+          namespacePermission_ = builderForValue.build();
+          onChanged();
+        } else {
+          namespacePermissionBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000004;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public Builder mergeNamespacePermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission value) {
+        if (namespacePermissionBuilder_ == null) {
+          if (((bitField0_ & 0x00000004) == 0x00000004) &&
+              namespacePermission_ != null &&
+              namespacePermission_ != org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.getDefaultInstance()) {
+            namespacePermission_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.newBuilder(namespacePermission_).mergeFrom(value).buildPartial();
+          } else {
+            namespacePermission_ = value;
+          }
+          onChanged();
+        } else {
+          namespacePermissionBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000004;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public Builder clearNamespacePermission() {
+        if (namespacePermissionBuilder_ == null) {
+          namespacePermission_ = null;
+          onChanged();
+        } else {
+          namespacePermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000004);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder getNamespacePermissionBuilder() {
+        bitField0_ |= 0x00000004;
+        onChanged();
+        return getNamespacePermissionFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder getNamespacePermissionOrBuilder() {
+        if (namespacePermissionBuilder_ != null) {
+          return namespacePermissionBuilder_.getMessageOrBuilder();
+        } else {
+          return namespacePermission_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.getDefaultInstance() : namespacePermission_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.NamespacePermission namespace_permission = 3;</code>
+       */
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>
+          getNamespacePermissionFieldBuilder() {
+        if (namespacePermissionBuilder_ == null) {
+          namespacePermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>(
+                  getNamespacePermission(),
+                  getParentForChildren(),
+                  isClean());
+          namespacePermission_ = null;
+        }
+        return namespacePermissionBuilder_;
+      }
+
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission tablePermission_ = null;
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder> tablePermissionBuilder_;
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public boolean hasTablePermission() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission getTablePermission() {
+        if (tablePermissionBuilder_ == null) {
+          return tablePermission_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance() : tablePermission_;
+        } else {
+          return tablePermissionBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public Builder setTablePermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission value) {
+        if (tablePermissionBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          tablePermission_ = value;
+          onChanged();
+        } else {
+          tablePermissionBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000008;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public Builder setTablePermission(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder builderForValue) {
+        if (tablePermissionBuilder_ == null) {
+          tablePermission_ = builderForValue.build();
+          onChanged();
+        } else {
+          tablePermissionBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000008;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public Builder mergeTablePermission(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission value) {
+        if (tablePermissionBuilder_ == null) {
+          if (((bitField0_ & 0x00000008) == 0x00000008) &&
+              tablePermission_ != null &&
+              tablePermission_ != org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance()) {
+            tablePermission_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.newBuilder(tablePermission_).mergeFrom(value).buildPartial();
+          } else {
+            tablePermission_ = value;
+          }
+          onChanged();
+        } else {
+          tablePermissionBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000008;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public Builder clearTablePermission() {
+        if (tablePermissionBuilder_ == null) {
+          tablePermission_ = null;
+          onChanged();
+        } else {
+          tablePermissionBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000008);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder getTablePermissionBuilder() {
+        bitField0_ |= 0x00000008;
+        onChanged();
+        return getTablePermissionFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder getTablePermissionOrBuilder() {
+        if (tablePermissionBuilder_ != null) {
+          return tablePermissionBuilder_.getMessageOrBuilder();
+        } else {
+          return tablePermission_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance() : tablePermission_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.TablePermission table_permission = 4;</code>
+       */
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>
+          getTablePermissionFieldBuilder() {
+        if (tablePermissionBuilder_ == null) {
+          tablePermissionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>(
+                  getTablePermission(),
+                  getParentForChildren(),
+                  isClean());
+          tablePermission_ = null;
+        }
+        return tablePermissionBuilder_;
+      }
+      public final Builder setUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.Permission)
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.Permission)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Permission>
+        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Permission>() {
+      public Permission parsePartialFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+          return new Permission(input, extensionRegistry);
+      }
+    };
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Permission> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Permission> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface TablePermissionOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.TablePermission)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    boolean hasTableName();
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName();
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
+    boolean hasFamily();
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily();
+
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
+    boolean hasQualifier();
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier();
+
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action> getActionList();
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    int getActionCount();
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index);
+  }
+  /**
+   * Protobuf type {@code hbase.pb.TablePermission}
+   */
+  public  static final class TablePermission extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.TablePermission)
+      TablePermissionOrBuilder {
+    // Use TablePermission.newBuilder() to construct.
+    private TablePermission(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private TablePermission() {
+      family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+      qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+      action_ = java.util.Collections.emptyList();
+    }
+
+    @java.lang.Override
+    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private TablePermission(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      int mutable_bitField0_ = 0;
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = tableName_.toBuilder();
+              }
+              tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(tableName_);
+                tableName_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              family_ = input.readBytes();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              qualifier_ = input.readBytes();
+              break;
+            }
+            case 32: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(4, rawValue);
+              } else {
+                if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                  action_ = new java.util.ArrayList<java.lang.Integer>();
+                  mutable_bitField0_ |= 0x00000008;
+                }
+                action_.add(rawValue);
+              }
+              break;
+            }
+            case 34: {
+              int length = input.readRawVarint32();
+              int oldLimit = input.pushLimit(length);
+              while(input.getBytesUntilLimit() > 0) {
+                int rawValue = input.readEnum();
+                org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action value = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(rawValue);
+                if (value == null) {
+                  unknownFields.mergeVarintField(4, rawValue);
+                } else {
+                  if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                    action_ = new java.util.ArrayList<java.lang.Integer>();
+                    mutable_bitField0_ |= 0x00000008;
+                  }
+                  action_.add(rawValue);
+                }
+              }
+              input.popLimit(oldLimit);
+              break;
+            }
+          }
+        }
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+          action_ = java.util.Collections.unmodifiableList(action_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_TablePermission_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_TablePermission_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder.class);
+    }
+
+    private int bitField0_;
+    public static final int TABLE_NAME_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+    }
+    /**
+     * <code>optional .hbase.pb.TableName table_name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
+    }
+
+    public static final int FAMILY_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString family_;
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
+    public boolean hasFamily() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional bytes family = 2;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getFamily() {
+      return family_;
+    }
+
+    public static final int QUALIFIER_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString qualifier_;
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
+    public boolean hasQualifier() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional bytes qualifier = 3;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getQualifier() {
+      return qualifier_;
+    }
+
+    public static final int ACTION_FIELD_NUMBER = 4;
+    private java.util.List<java.lang.Integer> action_;
+    private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.ListAdapter.Converter<
+        java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action> action_converter_ =
+            new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.ListAdapter.Converter<
+                java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action>() {
+              public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action convert(java.lang.Integer from) {
+                org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action result = org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.valueOf(from);
+                return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.READ : result;
+              }
+            };
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action> getActionList() {
+      return new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.ListAdapter<
+          java.lang.Integer, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action>(action_, action_converter_);
+    }
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    public int getActionCount() {
+      return action_.size();
+    }
+    /**
+     * <code>repeated .hbase.pb.Permission.Action action = 4;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action getAction(int index) {
+      return action_converter_.convert(action_.get(index));
+    }
+
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      if (hasTableName()) {
+        if (!getTableName().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeMessage(1, getTableName());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, family_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, qualifier_);
+      }
+      for (int i = 0; i < action_.size(); i++) {
+        output.writeEnum(4, action_.get(i));
+      }
+      unknownFields.writeTo(output);
+    }
+
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, getTableName());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, family_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, qualifier_);
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < action_.size(); i++) {
+          dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+            .computeEnumSizeNoTag(action_.get(i));
+        }
+        size += dataSize;
+        size += 1 * action_.size();
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission other = (org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission) obj;
+
+      boolean result = true;
+      result = result && (hasTableName() == other.hasTableName());
+      if (hasTableName()) {
+        result = result && getTableName()
+            .equals(other.getTableName());
+      }
+      result = result && (hasFamily() == other.hasFamily());
+      if (hasFamily()) {
+        result = result && getFamily()
+            .equals(other.getFamily());
+      }
+      result = result && (hasQualifier() == other.hasQualifier());
+      if (hasQualifier()) {
+        result = result && getQualifier()
+            .equals(other.getQualifier());
+      }
+      result = result && action_.equals(other.action_);
+      result = result && unknownFields.equals(other.unknownFields);
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasTableName()) {
+        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getTableName().hashCode();
+      }
+      if (hasFamily()) {
+        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+        hash = (53 * hash) + getFamily().hashCode();
+      }
+      if (hasQualifier()) {
+        hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+        hash = (53 * hash) + getQualifier().hashCode();
+      }
+      if (getActionCount() > 0) {
+        hash = (37 * hash) + ACTION_FIELD_NUMBER;
+        hash = (53 * hash) + action_.hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(byte[] data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        byte[] data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseDelimitedFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.TablePermission}
+     */
+    public static final class Builder extends
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.TablePermission)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder {
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_TablePermission_descriptor;
+      }
+
+      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_TablePermission_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.class, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+          getTableNameFieldBuilder();
+        }
+      }
+      public Builder clear() {
+        super.clear();
+        if (tableNameBuilder_ == null) {
+          tableName_ = null;
+        } else {
+          tableNameBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        family_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        qualifier_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        action_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000008);
+        return this;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_TablePermission_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission build() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission buildPartial() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission result = new org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        if (tableNameBuilder_ == null) {
+          result.tableName_ = tableName_;
+        } else {
+          result.tableName_ = tableNameBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.family_ = family_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.qualifier_ = qualifier_;
+        if (((bitField0_ & 0x00000008) == 0x00000008)) {
+          action_ = java.util.Collections.unmodifiableList(action_);
+          bitField0_ = (bitField0_ & ~0x00000008);
+        }
+        result.action_ = action_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission other) {
+        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.getDefaultInstance()) return this;
+        if (other.hasTableName()) {
+          mergeTableName(other.getTableName());
+        }
+        if (other.hasFamily()) {
+          setFamily(other.getFamily());
+        }
+        if (other.hasQualifier()) {
+          setQualifier(other.getQualifier());
+        }
+        if (!other.action_.isEmpty()) {
+          if (action_.isEmpty()) {
+            action_ = other.action_;
+            bitField0_ = (bitField0_ & ~0x00000008);
+          } else {
+            ensureActionIsMutable();
+            action_.addAll(other.action_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (hasTableName()) {
+          if (!getTableName().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parsedMessage = null

<TRUNCATED>
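
A note for readers skimming the generated AccessControlProtos code above: client code only touches the builder surface. Below is a minimal, hypothetical sketch of assembling a table-scoped Permission with the shaded builders; the table, family, and action values are illustrative, and the Permission.Type values (Global/Namespace/Table) are assumed from the new AccessControl.proto rather than shown in this excerpt.

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

public class PermissionBuilderSketch {
  public static void main(String[] args) {
    // Table-scoped part: table name, column family, and the granted actions.
    AccessControlProtos.TablePermission tablePerm =
        AccessControlProtos.TablePermission.newBuilder()
            .setTableName(HBaseProtos.TableName.newBuilder()
                .setNamespace(ByteString.copyFromUtf8("default"))
                .setQualifier(ByteString.copyFromUtf8("t1")))
            .setFamily(ByteString.copyFromUtf8("cf"))
            .addAction(AccessControlProtos.Permission.Action.READ)
            .addAction(AccessControlProtos.Permission.Action.WRITE)
            .build();

    // Top-level Permission: exactly one of the global/namespace/table
    // sub-messages is set, matching the type field.
    AccessControlProtos.Permission perm =
        AccessControlProtos.Permission.newBuilder()
            .setType(AccessControlProtos.Permission.Type.Table)
            .setTablePermission(tablePerm)
            .build();

    System.out.println(perm);
  }
}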

[8/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Signed-off-by: Guanghao Zhang <zg...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/37dd8ff7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/37dd8ff7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/37dd8ff7

Branch: refs/heads/master
Commit: 37dd8ff722fa762d9ef86488dea90e5470672e67
Parents: 32d2062
Author: huzheng <op...@gmail.com>
Authored: Mon May 8 21:01:47 2017 +0800
Committer: Guanghao Zhang <zg...@apache.org>
Committed: Thu May 18 17:39:50 2017 +0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/client/Admin.java   |    30 +
 .../hadoop/hbase/client/AsyncHBaseAdmin.java    |     5 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |    55 +-
 .../access/ShadedAccessControlUtil.java         |   277 +
 .../hbase/security/access/TablePermission.java  |     4 +
 .../hbase/shaded/protobuf/ProtobufUtil.java     |    20 +-
 .../ClientSnapshotDescriptionUtils.java         |     5 +-
 .../protobuf/generated/AccessControlProtos.java | 11171 +++++++++++++++++
 .../shaded/protobuf/generated/AdminProtos.java  |     2 -
 .../shaded/protobuf/generated/HBaseProtos.java  |  1393 +-
 .../generated/MasterProcedureProtos.java        |   516 +-
 .../shaded/protobuf/generated/MasterProtos.java |  1191 +-
 .../protobuf/generated/SnapshotProtos.java      |  1585 ++-
 .../src/main/protobuf/AccessControl.proto       |   130 +
 .../src/main/protobuf/HBase.proto               |    17 +-
 .../src/main/protobuf/Master.proto              |     2 +
 .../src/main/protobuf/MasterProcedure.proto     |     3 +
 .../src/main/protobuf/Snapshot.proto            |    19 +
 .../hbase/rsgroup/RSGroupAdminEndpoint.java     |     4 +-
 .../hadoop/hbase/backup/util/RestoreTool.java   |     4 +-
 .../hbase/coprocessor/MasterObserver.java       |     2 +-
 .../mapreduce/TableSnapshotInputFormatImpl.java |     2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |     7 +-
 .../hbase/master/MasterCoprocessorHost.java     |     2 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |     6 +-
 .../hadoop/hbase/master/SnapshotSentinel.java   |     2 +-
 .../procedure/CloneSnapshotProcedure.java       |    24 +-
 .../procedure/RestoreSnapshotProcedure.java     |    25 +-
 .../snapshot/DisabledTableSnapshotHandler.java  |     2 +-
 .../snapshot/EnabledTableSnapshotHandler.java   |     2 +-
 .../master/snapshot/MasterSnapshotVerifier.java |     2 +-
 .../hbase/master/snapshot/SnapshotManager.java  |    46 +-
 .../master/snapshot/TakeSnapshotHandler.java    |     2 +-
 .../hadoop/hbase/regionserver/HRegion.java      |     2 +-
 .../snapshot/FlushSnapshotSubprocedure.java     |     2 +-
 .../snapshot/RegionServerSnapshotManager.java   |     2 +-
 .../security/access/AccessControlLists.java     |     2 +-
 .../hbase/security/access/AccessController.java |     2 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java   |     2 +-
 .../hbase/snapshot/RestoreSnapshotHelper.java   |    29 +-
 .../snapshot/SnapshotDescriptionUtils.java      |    44 +-
 .../hadoop/hbase/snapshot/SnapshotInfo.java     |    21 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |     2 +-
 .../hbase/snapshot/SnapshotManifestV1.java      |     2 +-
 .../hbase/snapshot/SnapshotManifestV2.java      |     4 +-
 .../hbase/snapshot/SnapshotReferenceUtil.java   |     2 +-
 .../hbase-webapps/master/snapshotsStats.jsp     |     5 +-
 .../hbase/client/TestSnapshotWithAcl.java       |   240 +
 .../hbase/coprocessor/TestMasterObserver.java   |     2 +-
 .../master/cleaner/TestSnapshotFromMaster.java  |     2 +-
 .../procedure/TestCloneSnapshotProcedure.java   |    13 +-
 .../procedure/TestRestoreSnapshotProcedure.java |     3 +-
 .../hbase/security/access/SecureTestUtil.java   |     2 +-
 .../security/access/TestAccessController.java   |     3 +-
 .../access/TestWithDisabledAuthorization.java   |     2 +-
 .../hbase/snapshot/SnapshotTestingUtils.java    |    34 +-
 .../hbase/snapshot/TestExportSnapshot.java      |     2 +-
 .../snapshot/TestFlushSnapshotFromClient.java   |     5 +-
 .../snapshot/TestRestoreSnapshotHelper.java     |     2 +-
 .../snapshot/TestSnapshotClientRetries.java     |     2 +-
 .../snapshot/TestSnapshotDescriptionUtils.java  |     4 +-
 .../hbase/snapshot/TestSnapshotManifest.java    |     2 +-
 hbase-shell/src/main/ruby/hbase/admin.rb        |    10 +-
 hbase-shell/src/main/ruby/hbase_constants.rb    |     1 +
 .../main/ruby/shell/commands/clone_snapshot.rb  |    11 +-
 .../ruby/shell/commands/restore_snapshot.rb     |    10 +-
 .../hbase/client/TestReplicationShell.java      |     1 -
 67 files changed, 14677 insertions(+), 2352 deletions(-)
----------------------------------------------------------------------
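
At the client API level, the change amounts to two new boolean-taking overloads on Admin (see the Admin.java hunk below). A hedged usage sketch follows; the connection setup, table, and snapshot names are illustrative, not from the patch.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SnapshotAclUsageSketch {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      // Clone into a new table, re-applying the ACLs retained in the snapshot.
      admin.cloneSnapshot("snap1", TableName.valueOf("t1_clone"), true);

      // Restore in place: the table must be disabled first. Take a failsafe
      // snapshot before restoring, and re-apply the retained ACLs.
      admin.disableTable(TableName.valueOf("t1"));
      admin.restoreSnapshot("snap1", true, true);
      admin.enableTable(TableName.valueOf("t1"));
    }
  }
}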


http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 6e7c566..9300372 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1521,6 +1521,23 @@ public interface Admin extends Abortable, Closeable {
       throws IOException, RestoreSnapshotException;
 
   /**
+   * Restore the specified snapshot on the original table. (The table must be disabled.) If
+   * 'takeFailSafeSnapshot' is set to true, a snapshot of the current table is taken before
+   * executing the restore operation. If the restore fails, the failsafe snapshot will be
+   * restored instead. If the restore completes without problems, the failsafe snapshot is
+   * deleted. The failsafe snapshot name is configurable via the property
+   * "hbase.snapshot.restore.failsafe.name".
+   * @param snapshotName name of the snapshot to restore
+   * @param takeFailSafeSnapshot true if the failsafe snapshot should be taken
+   * @param restoreAcl true to restore the ACLs retained in the snapshot
+   * @throws IOException if a remote or network exception occurs
+   * @throws RestoreSnapshotException if snapshot failed to be restored
+   * @throws IllegalArgumentException if the restore request is formatted incorrectly
+   */
+  void restoreSnapshot(final String snapshotName, final boolean takeFailSafeSnapshot,
+      final boolean restoreAcl) throws IOException, RestoreSnapshotException;
+
+  /**
    * Create a new table by cloning the snapshot content.
    *
    * @param snapshotName name of the snapshot to be cloned
@@ -1535,6 +1552,19 @@ public interface Admin extends Abortable, Closeable {
 
   /**
    * Create a new table by cloning the snapshot content.
+   * @param snapshotName name of the snapshot to be cloned
+   * @param tableName name of the table where the snapshot will be restored
+   * @param restoreAcl true to clone the ACLs into the newly created table
+   * @throws IOException if a remote or network exception occurs
+   * @throws TableExistsException if table to be created already exists
+   * @throws RestoreSnapshotException if snapshot failed to be cloned
+   * @throws IllegalArgumentException if the specified table does not have a valid name
+   */
+  void cloneSnapshot(final String snapshotName, final TableName tableName, final boolean restoreAcl)
+      throws IOException, TableExistsException, RestoreSnapshotException;
+
+  /**
+   * Create a new table by cloning the snapshot content.
    *
    * @param snapshotName name of the snapshot to be cloned
    * @param tableName name of the table where the snapshot will be restored
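
A side note on the failsafe property named in the restoreSnapshot javadoc above: it can be overridden on the client Configuration before the restore. The default pattern shown in the comment below is an assumption based on HBase's documented behavior, not part of this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class FailsafeNameSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Assumed default: "hbase-failsafe-{snapshot.name}-{restore.timestamp}",
    // where the placeholders are substituted at restore time. Override it
    // before creating the Connection used for the restore.
    conf.set("hbase.snapshot.restore.failsafe.name",
        "failsafe-{snapshot.name}-{restore.timestamp}");
    System.out.println(conf.get("hbase.snapshot.restore.failsafe.name"));
  }
}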

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
index baad871..5e64fa3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
@@ -178,6 +178,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Remov
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
@@ -1706,7 +1707,7 @@ public class AsyncHBaseAdmin implements AsyncAdmin {
 
   @Override
   public CompletableFuture<Void> snapshot(SnapshotDescription snapshotDesc) {
-    HBaseProtos.SnapshotDescription snapshot =
+    SnapshotProtos.SnapshotDescription snapshot =
         ProtobufUtil.createHBaseProtosSnapshotDesc(snapshotDesc);
     try {
       ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(snapshot);
@@ -1916,7 +1917,7 @@ public class AsyncHBaseAdmin implements AsyncAdmin {
 
   private CompletableFuture<Void> internalRestoreSnapshot(String snapshotName,
       TableName tableName) {
-    HBaseProtos.SnapshotDescription snapshot = HBaseProtos.SnapshotDescription.newBuilder()
+    SnapshotProtos.SnapshotDescription snapshot = SnapshotProtos.SnapshotDescription.newBuilder()
         .setName(snapshotName).setTable(tableName.getNameAsString()).build();
     try {
       ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(snapshot);

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index ca5f0d2..d9bd75e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -107,7 +107,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
@@ -186,6 +185,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRe
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
@@ -2420,7 +2420,7 @@ public class HBaseAdmin implements Admin {
   public void snapshot(SnapshotDescription snapshotDesc)
       throws IOException, SnapshotCreationException, IllegalArgumentException {
     // actually take the snapshot
-    HBaseProtos.SnapshotDescription snapshot =
+    SnapshotProtos.SnapshotDescription snapshot =
       ProtobufUtil.createHBaseProtosSnapshotDesc(snapshotDesc);
     SnapshotResponse response = asyncSnapshot(snapshot);
     final IsSnapshotDoneRequest request =
@@ -2466,7 +2466,7 @@ public class HBaseAdmin implements Admin {
     asyncSnapshot(ProtobufUtil.createHBaseProtosSnapshotDesc(snapshotDesc));
   }
 
-  private SnapshotResponse asyncSnapshot(HBaseProtos.SnapshotDescription snapshot)
+  private SnapshotResponse asyncSnapshot(SnapshotProtos.SnapshotDescription snapshot)
       throws IOException {
     ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(snapshot);
     final SnapshotRequest request = SnapshotRequest.newBuilder().setSnapshot(snapshot)
@@ -2484,7 +2484,7 @@ public class HBaseAdmin implements Admin {
   @Override
   public boolean isSnapshotFinished(final SnapshotDescription snapshotDesc)
       throws IOException, HBaseSnapshotException, UnknownSnapshotException {
-    final HBaseProtos.SnapshotDescription snapshot =
+    final SnapshotProtos.SnapshotDescription snapshot =
         ProtobufUtil.createHBaseProtosSnapshotDesc(snapshotDesc);
     return executeCallable(new MasterCallable<IsSnapshotDoneResponse>(getConnection(),
         getRpcControllerFactory()) {
@@ -2542,13 +2542,19 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public void restoreSnapshot(final String snapshotName, final boolean takeFailSafeSnapshot)
+  public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot)
       throws IOException, RestoreSnapshotException {
+    restoreSnapshot(snapshotName, takeFailSafeSnapshot, false);
+  }
+
+  @Override
+  public void restoreSnapshot(final String snapshotName, final boolean takeFailSafeSnapshot,
+      final boolean restoreAcl) throws IOException, RestoreSnapshotException {
     TableName tableName = getTableNameBeforeRestoreSnapshot(snapshotName);
 
     // The table does not exist, switch to clone.
     if (!tableExists(tableName)) {
-      cloneSnapshot(snapshotName, tableName);
+      cloneSnapshot(snapshotName, tableName, restoreAcl);
       return;
     }
 
@@ -2573,7 +2579,7 @@ public class HBaseAdmin implements Admin {
     try {
       // Restore snapshot
       get(
-        internalRestoreSnapshotAsync(snapshotName, tableName),
+        internalRestoreSnapshotAsync(snapshotName, tableName, restoreAcl),
         syncWaitTimeout,
         TimeUnit.MILLISECONDS);
     } catch (IOException e) {
@@ -2582,7 +2588,7 @@ public class HBaseAdmin implements Admin {
       if (takeFailSafeSnapshot) {
         try {
           get(
-            internalRestoreSnapshotAsync(failSafeSnapshotSnapshotName, tableName),
+            internalRestoreSnapshotAsync(failSafeSnapshotSnapshotName, tableName, restoreAcl),
             syncWaitTimeout,
             TimeUnit.MILLISECONDS);
           String msg = "Restore snapshot=" + snapshotName +
@@ -2625,7 +2631,7 @@ public class HBaseAdmin implements Admin {
       throw new TableNotDisabledException(tableName);
     }
 
-    return internalRestoreSnapshotAsync(snapshotName, tableName);
+    return internalRestoreSnapshotAsync(snapshotName, tableName, false);
   }
 
   @Override
@@ -2635,24 +2641,30 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public void cloneSnapshot(final String snapshotName, final TableName tableName)
+  public void cloneSnapshot(String snapshotName, TableName tableName, boolean restoreAcl)
       throws IOException, TableExistsException, RestoreSnapshotException {
     if (tableExists(tableName)) {
       throw new TableExistsException(tableName);
     }
     get(
-      internalRestoreSnapshotAsync(snapshotName, tableName),
+      internalRestoreSnapshotAsync(snapshotName, tableName, restoreAcl),
       Integer.MAX_VALUE,
       TimeUnit.MILLISECONDS);
   }
 
   @Override
+  public void cloneSnapshot(final String snapshotName, final TableName tableName)
+      throws IOException, TableExistsException, RestoreSnapshotException {
+    cloneSnapshot(snapshotName, tableName, false);
+  }
+
+  @Override
   public Future<Void> cloneSnapshotAsync(final String snapshotName, final TableName tableName)
       throws IOException, TableExistsException {
     if (tableExists(tableName)) {
       throw new TableExistsException(tableName);
     }
-    return internalRestoreSnapshotAsync(snapshotName, tableName);
+    return internalRestoreSnapshotAsync(snapshotName, tableName, false);
   }
 
   @Override
@@ -2740,10 +2752,10 @@ public class HBaseAdmin implements Admin {
    * @throws RestoreSnapshotException if snapshot failed to be restored
    * @throws IllegalArgumentException if the restore request is formatted incorrectly
    */
-  private Future<Void> internalRestoreSnapshotAsync(
-      final String snapshotName,
-      final TableName tableName) throws IOException, RestoreSnapshotException {
-    final HBaseProtos.SnapshotDescription snapshot = HBaseProtos.SnapshotDescription.newBuilder()
+  private Future<Void> internalRestoreSnapshotAsync(final String snapshotName,
+      final TableName tableName, final boolean restoreAcl)
+      throws IOException, RestoreSnapshotException {
+    final SnapshotProtos.SnapshotDescription snapshot = SnapshotProtos.SnapshotDescription.newBuilder()
         .setName(snapshotName).setTable(tableName.getNameAsString()).build();
 
     // actually restore the snapshot
@@ -2757,6 +2769,7 @@ public class HBaseAdmin implements Admin {
             .setSnapshot(snapshot)
             .setNonceGroup(ng.getNonceGroup())
             .setNonce(ng.newNonce())
+            .setRestoreACL(restoreAcl)
             .build();
         return master.restoreSnapshot(getRpcController(), request);
       }
@@ -2768,7 +2781,7 @@ public class HBaseAdmin implements Admin {
   private static class RestoreSnapshotFuture extends TableFuture<Void> {
     public RestoreSnapshotFuture(
         final HBaseAdmin admin,
-        final HBaseProtos.SnapshotDescription snapshot,
+        final SnapshotProtos.SnapshotDescription snapshot,
         final TableName tableName,
         final RestoreSnapshotResponse response) {
       super(admin, tableName,
@@ -2798,12 +2811,12 @@ public class HBaseAdmin implements Admin {
         getRpcControllerFactory()) {
       @Override
       protected List<SnapshotDescription> rpcCall() throws Exception {
-        List<HBaseProtos.SnapshotDescription> snapshotsList = master
+        List<SnapshotProtos.SnapshotDescription> snapshotsList = master
             .getCompletedSnapshots(getRpcController(),
                 GetCompletedSnapshotsRequest.newBuilder().build())
             .getSnapshotsList();
         List<SnapshotDescription> result = new ArrayList<>(snapshotsList.size());
-        for (HBaseProtos.SnapshotDescription snapshot : snapshotsList) {
+        for (SnapshotProtos.SnapshotDescription snapshot : snapshotsList) {
           result.add(ProtobufUtil.createSnapshotDesc(snapshot));
         }
         return result;
@@ -2866,7 +2879,7 @@ public class HBaseAdmin implements Admin {
       protected Void rpcCall() throws Exception {
         master.deleteSnapshot(getRpcController(),
           DeleteSnapshotRequest.newBuilder().setSnapshot(
-                HBaseProtos.SnapshotDescription.newBuilder().setName(snapshotName).build())
+                SnapshotProtos.SnapshotDescription.newBuilder().setName(snapshotName).build())
               .build()
         );
         return null;
@@ -4122,7 +4135,7 @@ public class HBaseAdmin implements Admin {
   /**
    * Decide whether the table needs to be replicated to the peer cluster according to the peer config
    * @param table name of the table
-   * @param peerConfig config for the peer
+   * @param peer the replication peer to check against
    * @return true if the table needs to be replicated to the peer cluster
    */
   private boolean needToReplicate(TableName table, ReplicationPeerDescription peer) {
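
For reference, a minimal client-side sketch of the new ACL-aware restore/clone calls. This is an
illustration only: the connection setup, the snapshot name "snap1" and the clone table "t1_clone"
are invented, and a secure cluster with the AccessController coprocessor is assumed.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class RestoreWithAclExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      // Restore "snap1" onto its source table, taking a failsafe snapshot
      // first and re-applying the user permissions retained in the snapshot.
      admin.restoreSnapshot("snap1", true, true);
      // Clone the same snapshot into a new table, carrying the ACLs over.
      admin.cloneSnapshot("snap1", TableName.valueOf("t1_clone"), true);
    }
  }
}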

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
new file mode 100644
index 0000000..03798ad
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.security.access;
+
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Convert protobuf objects in AccessControl.proto under hbase-protocol-shaded to user-oriented
+ * objects and vice versa. <br>
+ *
+ * In HBASE-15638, we created the hbase-protocol-shaded module to upgrade the protobuf version to
+ * 3.x, but some coprocessor endpoints (such as AccessControl, Authentication, MultiRowMutation)
+ * still depend on the hbase-protocol module for CPEP compatibility. In fact, we use the PB objects
+ * in AccessControl.proto under hbase-protocol for the access control logic, and use the shaded
+ * AccessControl.proto only for serializing/deserializing the permissions stored in .snapshotinfo.
+ */
+@InterfaceAudience.Private
+public class ShadedAccessControlUtil {
+
+  /**
+   * Convert a client Permission.Action to a shaded proto Permission.Action.
+   */
+  public static
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action
+      toPermissionAction(Permission.Action action) {
+    switch (action) {
+    case READ:
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.READ;
+    case WRITE:
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.WRITE;
+    case EXEC:
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.EXEC;
+    case CREATE:
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.CREATE;
+    case ADMIN:
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action.ADMIN;
+    }
+    throw new IllegalArgumentException("Unknown action value " + action.name());
+  }
+
+  /**
+   * Convert a Permission.Action shaded proto to a client Permission.Action object.
+   */
+  public static Permission.Action toPermissionAction(
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action action) {
+    switch (action) {
+    case READ:
+      return Permission.Action.READ;
+    case WRITE:
+      return Permission.Action.WRITE;
+    case EXEC:
+      return Permission.Action.EXEC;
+    case CREATE:
+      return Permission.Action.CREATE;
+    case ADMIN:
+      return Permission.Action.ADMIN;
+    }
+    throw new IllegalArgumentException("Unknown action value " + action.name());
+  }
+
+  /**
+   * Converts a list of Permission.Action shaded proto to a list of client Permission.Action
+   * objects.
+   * @param protoActions the list of shaded protobuf Actions
+   * @return the converted list of Actions
+   */
+  public static List<Permission.Action> toPermissionActions(
+      List<org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action> protoActions) {
+    List<Permission.Action> actions = new ArrayList<>(protoActions.size());
+    for (org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Action a : protoActions) {
+      actions.add(toPermissionAction(a));
+    }
+    return actions;
+  }
+
+  public static org.apache.hadoop.hbase.TableName toTableName(HBaseProtos.TableName tableNamePB) {
+    return org.apache.hadoop.hbase.TableName.valueOf(
+      tableNamePB.getNamespace().asReadOnlyByteBuffer(),
+      tableNamePB.getQualifier().asReadOnlyByteBuffer());
+  }
+
+  public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
+    return HBaseProtos.TableName.newBuilder()
+        .setNamespace(ByteString.copyFrom(tableName.getNamespace()))
+        .setQualifier(ByteString.copyFrom(tableName.getQualifier())).build();
+  }
+
+  /**
+   * Converts a Permission shaded proto to a client TablePermission object.
+   * @param proto the protobuf Permission
+   * @return the converted TablePermission
+   */
+  public static TablePermission toTablePermission(AccessControlProtos.Permission proto) {
+
+    if (proto.getType() == AccessControlProtos.Permission.Type.Global) {
+      AccessControlProtos.GlobalPermission perm = proto.getGlobalPermission();
+      List<Action> actions = toPermissionActions(perm.getActionList());
+
+      return new TablePermission(null, null, null,
+          actions.toArray(new Permission.Action[actions.size()]));
+    }
+    if (proto.getType() == AccessControlProtos.Permission.Type.Namespace) {
+      // Guard before reading the permission, so an empty message fails fast.
+      if (!proto.hasNamespacePermission()) {
+        throw new IllegalStateException("NamespacePermission must not be empty");
+      }
+      AccessControlProtos.NamespacePermission perm = proto.getNamespacePermission();
+      List<Permission.Action> actions = toPermissionActions(perm.getActionList());
+
+      String namespace = perm.getNamespaceName().toStringUtf8();
+      return new TablePermission(namespace, actions.toArray(new Permission.Action[actions.size()]));
+    }
+    if (proto.getType() == AccessControlProtos.Permission.Type.Table) {
+      AccessControlProtos.TablePermission perm = proto.getTablePermission();
+      List<Permission.Action> actions = toPermissionActions(perm.getActionList());
+
+      byte[] qualifier = null;
+      byte[] family = null;
+      TableName table = null;
+
+      if (!perm.hasTableName()) {
+        throw new IllegalStateException("TableName cannot be empty");
+      }
+      table = toTableName(perm.getTableName());
+
+      if (perm.hasFamily()) family = perm.getFamily().toByteArray();
+      if (perm.hasQualifier()) qualifier = perm.getQualifier().toByteArray();
+
+      return new TablePermission(table, family, qualifier,
+          actions.toArray(new Permission.Action[actions.size()]));
+    }
+    throw new IllegalStateException("Unrecognize Perm Type: " + proto.getType());
+  }
+
+  /**
+   * Convert a client Permission to a Permission shaded proto
+   * @param perm the client Permission
+   * @return the protobuf Permission
+   */
+  public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission
+      toPermission(Permission perm) {
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Builder ret =
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission
+            .newBuilder();
+    if (perm instanceof TablePermission) {
+      TablePermission tablePerm = (TablePermission) perm;
+      if (tablePerm.hasNamespace()) {
+        ret.setType(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.Namespace);
+
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder builder =
+            org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission
+                .newBuilder();
+        builder.setNamespaceName(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+            .copyFromUtf8(tablePerm.getNamespace()));
+        Permission.Action[] actions = perm.getActions();
+        if (actions != null) {
+          for (Permission.Action a : actions) {
+            builder.addAction(toPermissionAction(a));
+          }
+        }
+        ret.setNamespacePermission(builder);
+        return ret.build();
+      } else if (tablePerm.hasTable()) {
+        ret.setType(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.Table);
+
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder builder =
+            org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission
+                .newBuilder();
+        builder.setTableName(toProtoTableName(tablePerm.getTableName()));
+        if (tablePerm.hasFamily()) {
+          builder.setFamily(ByteString.copyFrom(tablePerm.getFamily()));
+        }
+        if (tablePerm.hasQualifier()) {
+          builder.setQualifier(ByteString.copyFrom(tablePerm.getQualifier()));
+        }
+        Permission.Action[] actions = perm.getActions();
+        if (actions != null) {
+          for (Permission.Action a : actions) {
+            builder.addAction(toPermissionAction(a));
+          }
+        }
+        ret.setTablePermission(builder);
+        return ret.build();
+      }
+    }
+
+    ret.setType(
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type.Global);
+
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder builder =
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission
+            .newBuilder();
+    Permission.Action[] actions = perm.getActions();
+    if (actions != null) {
+      for (Permission.Action a : actions) {
+        builder.addAction(toPermissionAction(a));
+      }
+    }
+    ret.setGlobalPermission(builder);
+    return ret.build();
+  }
+
+  /**
+   * Convert a shaded protobuf UsersAndPermissions to a ListMultimap&lt;String, TablePermission&gt;
+   * where the key is the username.
+   * @param proto the protobuf UsersAndPermissions
+   * @return the converted ListMultimap of table permissions, keyed by username
+   */
+  public static ListMultimap<String, TablePermission> toUserTablePermissions(
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions proto) {
+    ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions userPerm;
+    for (int i = 0; i < proto.getUserPermissionsCount(); i++) {
+      userPerm = proto.getUserPermissions(i);
+      for (int j = 0; j < userPerm.getPermissionsCount(); j++) {
+        TablePermission tablePerm = toTablePermission(userPerm.getPermissions(j));
+        perms.put(userPerm.getUser().toStringUtf8(), tablePerm);
+      }
+    }
+    return perms;
+  }
+
+  /**
+   * Convert a ListMultimap&lt;String, TablePermission&gt; where the key is the username to a
+   * shaded protobuf UsersAndPermissions.
+   * @param perm the multimap of user and table permissions
+   * @return the protobuf UsersAndPermissions
+   */
+  public static
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions
+      toUserTablePermissions(ListMultimap<String, TablePermission> perm) {
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder builder =
+        org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions
+            .newBuilder();
+    for (Map.Entry<String, Collection<TablePermission>> entry : perm.asMap().entrySet()) {
+      org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions.Builder userPermBuilder =
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions
+              .newBuilder();
+      userPermBuilder.setUser(ByteString.copyFromUtf8(entry.getKey()));
+      for (TablePermission tablePerm : entry.getValue()) {
+        userPermBuilder.addPermissions(toPermission(tablePerm));
+      }
+      builder.addUserPermissions(userPermBuilder.build());
+    }
+    return builder.build();
+  }
+}
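
To make the converter's role concrete, here is a small round-trip sketch under invented names
(user "alice", table "t1"): it builds a ListMultimap of TablePermission keyed by user, serializes
it to the shaded UsersAndPermissions message (the form written alongside a snapshot), and converts
it back.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
import org.apache.hadoop.hbase.security.access.TablePermission;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;

public class ShadedAclRoundTrip {
  public static void main(String[] args) {
    ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();
    perms.put("alice", new TablePermission(TableName.valueOf("t1"), null, null,
        Permission.Action.READ, Permission.Action.WRITE));
    // Client objects -> shaded proto, as serialized for .snapshotinfo.
    AccessControlProtos.UsersAndPermissions proto =
        ShadedAccessControlUtil.toUserTablePermissions(perms);
    // Shaded proto -> client objects, as deserialized on clone/restore.
    ListMultimap<String, TablePermission> back =
        ShadedAccessControlUtil.toUserTablePermissions(proto);
    System.out.println(back.get("alice"));
  }
}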

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
index 4804b30..8e88a8c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
@@ -155,6 +155,10 @@ public class TablePermission extends Permission {
     return table;
   }
 
+  public void setTableName(TableName table) {
+    this.table = table;
+  }
+
   public boolean hasFamily() {
     return family != null;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 2e62deb..f8ea0a5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -35,7 +35,6 @@ import java.util.NavigableSet;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -164,6 +163,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -2954,9 +2954,9 @@ public final class ProtobufUtil {
    * @param type the SnapshotDescription type
    * @return the protobuf SnapshotDescription type
    */
-  public static HBaseProtos.SnapshotDescription.Type
+  public static SnapshotProtos.SnapshotDescription.Type
       createProtosSnapShotDescType(SnapshotType type) {
-    return HBaseProtos.SnapshotDescription.Type.valueOf(type.name());
+    return SnapshotProtos.SnapshotDescription.Type.valueOf(type.name());
   }
 
   /**
@@ -2965,9 +2965,9 @@ public final class ProtobufUtil {
    * @param snapshotDesc string representing the snapshot description type
    * @return the protobuf SnapshotDescription type
    */
-  public static HBaseProtos.SnapshotDescription.Type
+  public static SnapshotProtos.SnapshotDescription.Type
       createProtosSnapShotDescType(String snapshotDesc) {
-    return HBaseProtos.SnapshotDescription.Type.valueOf(snapshotDesc.toUpperCase(Locale.ROOT));
+    return SnapshotProtos.SnapshotDescription.Type.valueOf(snapshotDesc.toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -2976,7 +2976,7 @@ public final class ProtobufUtil {
    * @param type the snapshot description type
    * @return the protobuf SnapshotDescription type
    */
-  public static SnapshotType createSnapshotType(HBaseProtos.SnapshotDescription.Type type) {
+  public static SnapshotType createSnapshotType(SnapshotProtos.SnapshotDescription.Type type) {
     return SnapshotType.valueOf(type.toString());
   }
 
@@ -2986,9 +2986,9 @@ public final class ProtobufUtil {
    * @param snapshotDesc the POJO SnapshotDescription
    * @return the protobuf SnapshotDescription
    */
-  public static HBaseProtos.SnapshotDescription
+  public static SnapshotProtos.SnapshotDescription
       createHBaseProtosSnapshotDesc(SnapshotDescription snapshotDesc) {
-    HBaseProtos.SnapshotDescription.Builder builder = HBaseProtos.SnapshotDescription.newBuilder();
+    SnapshotProtos.SnapshotDescription.Builder builder = SnapshotProtos.SnapshotDescription.newBuilder();
     if (snapshotDesc.getTableName() != null) {
       builder.setTable(snapshotDesc.getTableNameAsString());
     }
@@ -3005,7 +3005,7 @@ public final class ProtobufUtil {
       builder.setVersion(snapshotDesc.getVersion());
     }
     builder.setType(ProtobufUtil.createProtosSnapShotDescType(snapshotDesc.getType()));
-    HBaseProtos.SnapshotDescription snapshot = builder.build();
+    SnapshotProtos.SnapshotDescription snapshot = builder.build();
     return snapshot;
   }
 
@@ -3017,7 +3017,7 @@ public final class ProtobufUtil {
    * @return the POJO SnapshotDescription
    */
   public static SnapshotDescription
-      createSnapshotDesc(HBaseProtos.SnapshotDescription snapshotDesc) {
+      createSnapshotDesc(SnapshotProtos.SnapshotDescription snapshotDesc) {
     return new SnapshotDescription(snapshotDesc.getName(),
         snapshotDesc.hasTable() ? TableName.valueOf(snapshotDesc.getTable()) : null,
         createSnapshotType(snapshotDesc.getType()), snapshotDesc.getOwner(),
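
As a quick illustration of the converters renamed in this hunk (the snapshot and table names are
invented), a round trip through the shaded type:

import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

public class SnapshotDescRoundTrip {
  public static void main(String[] args) {
    // Build the shaded proto directly; "snap1"/"t1" are made-up names.
    SnapshotProtos.SnapshotDescription proto =
        SnapshotProtos.SnapshotDescription.newBuilder()
            .setName("snap1")
            .setTable("t1")
            .setType(SnapshotProtos.SnapshotDescription.Type.FLUSH)
            .build();
    // Shaded proto -> client-facing POJO...
    SnapshotDescription pojo = ProtobufUtil.createSnapshotDesc(proto);
    // ...and back through the converter that now returns the shaded type.
    SnapshotProtos.SnapshotDescription roundTripped =
        ProtobufUtil.createHBaseProtosSnapshotDesc(pojo);
    System.out.println(pojo.getName() + " -> " + roundTripped.getTable());
  }
}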

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
index 88b6bec..3e83f3e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ClientSnapshotDescriptionUtils.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.snapshot;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -36,7 +37,7 @@ public class ClientSnapshotDescriptionUtils {
    * @throws IllegalArgumentException if the name of the snapshot or the name of the table to
    *           snapshot are not valid names.
    */
-  public static void assertSnapshotRequestIsValid(HBaseProtos.SnapshotDescription snapshot)
+  public static void assertSnapshotRequestIsValid(SnapshotProtos.SnapshotDescription snapshot)
       throws IllegalArgumentException {
     // make sure the snapshot name is valid
     TableName.isLegalTableQualifierName(Bytes.toBytes(snapshot.getName()), true);
@@ -57,7 +58,7 @@ public class ClientSnapshotDescriptionUtils {
    * @param ssd
    * @return Single line string with a summary of the snapshot parameters
    */
-  public static String toString(HBaseProtos.SnapshotDescription ssd) {
+  public static String toString(SnapshotProtos.SnapshotDescription ssd) {
     if (ssd == null) {
       return null;
     }
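
A short validation sketch against the now-shaded type, again with invented names:

import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;

public class SnapshotRequestCheck {
  public static void main(String[] args) {
    SnapshotProtos.SnapshotDescription ssd =
        SnapshotProtos.SnapshotDescription.newBuilder()
            .setName("snap1")
            .setTable("t1")
            .build();
    // Throws IllegalArgumentException if either name is not legal.
    ClientSnapshotDescriptionUtils.assertSnapshotRequestIsValid(ssd);
    // Single-line summary of the snapshot parameters.
    System.out.println(ClientSnapshotDescriptionUtils.toString(ssd));
  }
}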


http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
index 3f8a65b..d7bbd05 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
@@ -1510,6 +1510,10 @@ public final class MasterProcedureProtos {
      * <code>CLONE_SNAPSHOT_POST_OPERATION = 6;</code>
      */
     CLONE_SNAPSHOT_POST_OPERATION(6),
+    /**
+     * <code>CLONE_SNAPHOST_RESTORE_ACL = 7;</code>
+     */
+    CLONE_SNAPHOST_RESTORE_ACL(7),
     ;
 
     /**
@@ -1536,6 +1540,10 @@ public final class MasterProcedureProtos {
      * <code>CLONE_SNAPSHOT_POST_OPERATION = 6;</code>
      */
     public static final int CLONE_SNAPSHOT_POST_OPERATION_VALUE = 6;
+    /**
+     * <code>CLONE_SNAPHOST_RESTORE_ACL = 7;</code>
+     */
+    public static final int CLONE_SNAPHOST_RESTORE_ACL_VALUE = 7;
 
 
     public final int getNumber() {
@@ -1558,6 +1566,7 @@ public final class MasterProcedureProtos {
         case 4: return CLONE_SNAPSHOT_ASSIGN_REGIONS;
         case 5: return CLONE_SNAPSHOT_UPDATE_DESC_CACHE;
         case 6: return CLONE_SNAPSHOT_POST_OPERATION;
+        case 7: return CLONE_SNAPHOST_RESTORE_ACL;
         default: return null;
       }
     }
@@ -1628,6 +1637,10 @@ public final class MasterProcedureProtos {
      * <code>RESTORE_SNAPSHOT_UPDATE_META = 4;</code>
      */
     RESTORE_SNAPSHOT_UPDATE_META(4),
+    /**
+     * <code>RESTORE_SNAPSHOT_RESTORE_ACL = 5;</code>
+     */
+    RESTORE_SNAPSHOT_RESTORE_ACL(5),
     ;
 
     /**
@@ -1646,6 +1659,10 @@ public final class MasterProcedureProtos {
      * <code>RESTORE_SNAPSHOT_UPDATE_META = 4;</code>
      */
     public static final int RESTORE_SNAPSHOT_UPDATE_META_VALUE = 4;
+    /**
+     * <code>RESTORE_SNAPSHOT_RESTORE_ACL = 5;</code>
+     */
+    public static final int RESTORE_SNAPSHOT_RESTORE_ACL_VALUE = 5;
 
 
     public final int getNumber() {
@@ -1666,6 +1683,7 @@ public final class MasterProcedureProtos {
         case 2: return RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR;
         case 3: return RESTORE_SNAPSHOT_WRITE_FS_LAYOUT;
         case 4: return RESTORE_SNAPSHOT_UPDATE_META;
+        case 5: return RESTORE_SNAPSHOT_RESTORE_ACL;
         default: return null;
       }
     }
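
The two enum additions above give each snapshot procedure a final restore-ACL step. Purely as a
hedged illustration of consuming the new value (this is not the procedure implementation, and it
assumes the enclosing generated enum is MasterProcedureProtos.RestoreSnapshotState), a dispatcher
might look like:

import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotState;

public class RestoreStateDemo {
  // Hypothetical dispatcher; the real RestoreSnapshotProcedure logic is not
  // shown in this hunk. This only demonstrates the newly generated value.
  static String describe(RestoreSnapshotState state) {
    switch (state) {
      case RESTORE_SNAPSHOT_RESTORE_ACL:
        return "re-apply the user permissions retained with the snapshot";
      case RESTORE_SNAPSHOT_UPDATE_META:
        return "update hbase:meta";
      default:
        return state.name();
    }
  }

  public static void main(String[] args) {
    // forNumber(5) resolves to the new RESTORE_SNAPSHOT_RESTORE_ACL state.
    System.out.println(describe(RestoreSnapshotState.forNumber(5)));
  }
}
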
@@ -15612,11 +15630,11 @@ public final class MasterProcedureProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
 
     /**
      * <code>required .hbase.pb.TableSchema table_schema = 3;</code>
@@ -15737,11 +15755,11 @@ public final class MasterProcedureProtos {
               break;
             }
             case 18: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000002) == 0x00000002)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -15833,7 +15851,7 @@ public final class MasterProcedureProtos {
     }
 
     public static final int SNAPSHOT_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
@@ -15843,14 +15861,14 @@ public final class MasterProcedureProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     public static final int TABLE_SCHEMA_FIELD_NUMBER = 3;
@@ -16598,9 +16616,9 @@ public final class MasterProcedureProtos {
         return userInfoBuilder_;
       }
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
@@ -16610,9 +16628,9 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -16620,7 +16638,7 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -16637,7 +16655,7 @@ public final class MasterProcedureProtos {
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -16650,13 +16668,13 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000002) == 0x00000002) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -16683,7 +16701,7 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -16691,23 +16709,23 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -17386,11 +17404,11 @@ public final class MasterProcedureProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
 
     /**
      * <code>required .hbase.pb.TableSchema modified_table_schema = 3;</code>
@@ -17561,11 +17579,11 @@ public final class MasterProcedureProtos {
               break;
             }
             case 18: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000002) == 0x00000002)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -17681,7 +17699,7 @@ public final class MasterProcedureProtos {
     }
 
     public static final int SNAPSHOT_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
@@ -17691,14 +17709,14 @@ public final class MasterProcedureProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     public static final int MODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 3;
@@ -18648,9 +18666,9 @@ public final class MasterProcedureProtos {
         return userInfoBuilder_;
       }
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
@@ -18660,9 +18678,9 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -18670,7 +18688,7 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -18687,7 +18705,7 @@ public final class MasterProcedureProtos {
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -18700,13 +18718,13 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000002) == 0x00000002) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -18733,7 +18751,7 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -18741,23 +18759,23 @@ public final class MasterProcedureProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -24058,208 +24076,210 @@ public final class MasterProcedureProtos {
   static {
     java.lang.String[] descriptorData = {
       "\n\025MasterProcedure.proto\022\010hbase.pb\032\013HBase" +
-      ".proto\032\tRPC.proto\"\234\001\n\024CreateTableStateDa" +
-      "ta\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInf" +
-      "ormation\022+\n\014table_schema\030\002 \002(\0132\025.hbase.p" +
-      "b.TableSchema\022)\n\013region_info\030\003 \003(\0132\024.hba" +
-      "se.pb.RegionInfo\"\332\001\n\024ModifyTableStateDat" +
-      "a\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInfo" +
-      "rmation\0226\n\027unmodified_table_schema\030\002 \001(\013" +
-      "2\025.hbase.pb.TableSchema\0224\n\025modified_tabl" +
-      "e_schema\030\003 \002(\0132\025.hbase.pb.TableSchema\022&\n",
-      "\036delete_column_family_in_modify\030\004 \002(\010\"\340\001" +
-      "\n\026TruncateTableStateData\022,\n\tuser_info\030\001 " +
-      "\002(\0132\031.hbase.pb.UserInformation\022\027\n\017preser" +
-      "ve_splits\030\002 \002(\010\022\'\n\ntable_name\030\003 \001(\0132\023.hb" +
-      "ase.pb.TableName\022+\n\014table_schema\030\004 \001(\0132\025" +
-      ".hbase.pb.TableSchema\022)\n\013region_info\030\005 \003" +
-      "(\0132\024.hbase.pb.RegionInfo\"\230\001\n\024DeleteTable" +
-      "StateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb." +
-      "UserInformation\022\'\n\ntable_name\030\002 \002(\0132\023.hb" +
-      "ase.pb.TableName\022)\n\013region_info\030\003 \003(\0132\024.",
-      "hbase.pb.RegionInfo\"W\n\030CreateNamespaceSt" +
-      "ateData\022;\n\024namespace_descriptor\030\001 \002(\0132\035." +
-      "hbase.pb.NamespaceDescriptor\"\237\001\n\030ModifyN" +
-      "amespaceStateData\022;\n\024namespace_descripto" +
-      "r\030\001 \002(\0132\035.hbase.pb.NamespaceDescriptor\022F" +
-      "\n\037unmodified_namespace_descriptor\030\002 \001(\0132" +
-      "\035.hbase.pb.NamespaceDescriptor\"o\n\030Delete" +
-      "NamespaceStateData\022\026\n\016namespace_name\030\001 \002" +
-      "(\t\022;\n\024namespace_descriptor\030\002 \001(\0132\035.hbase" +
-      ".pb.NamespaceDescriptor\"\344\001\n\030AddColumnFam",
-      "ilyStateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase." +
-      "pb.UserInformation\022\'\n\ntable_name\030\002 \002(\0132\023" +
-      ".hbase.pb.TableName\0229\n\023columnfamily_sche" +
-      "ma\030\003 \002(\0132\034.hbase.pb.ColumnFamilySchema\0226" +
-      "\n\027unmodified_table_schema\030\004 \001(\0132\025.hbase." +
-      "pb.TableSchema\"\347\001\n\033ModifyColumnFamilySta" +
-      "teData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.Use" +
-      "rInformation\022\'\n\ntable_name\030\002 \002(\0132\023.hbase" +
-      ".pb.TableName\0229\n\023columnfamily_schema\030\003 \002" +
-      "(\0132\034.hbase.pb.ColumnFamilySchema\0226\n\027unmo",
-      "dified_table_schema\030\004 \001(\0132\025.hbase.pb.Tab" +
-      "leSchema\"\307\001\n\033DeleteColumnFamilyStateData" +
+      ".proto\032\tRPC.proto\032\016Snapshot.proto\"\234\001\n\024Cr" +
+      "eateTableStateData\022,\n\tuser_info\030\001 \002(\0132\031." +
+      "hbase.pb.UserInformation\022+\n\014table_schema" +
+      "\030\002 \002(\0132\025.hbase.pb.TableSchema\022)\n\013region_" +
+      "info\030\003 \003(\0132\024.hbase.pb.RegionInfo\"\332\001\n\024Mod" +
+      "ifyTableStateData\022,\n\tuser_info\030\001 \002(\0132\031.h" +
+      "base.pb.UserInformation\0226\n\027unmodified_ta" +
+      "ble_schema\030\002 \001(\0132\025.hbase.pb.TableSchema\022" +
+      "4\n\025modified_table_schema\030\003 \002(\0132\025.hbase.p",
+      "b.TableSchema\022&\n\036delete_column_family_in" +
+      "_modify\030\004 \002(\010\"\340\001\n\026TruncateTableStateData" +
       "\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInfor" +
-      "mation\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.Ta" +
-      "bleName\022\031\n\021columnfamily_name\030\003 \002(\014\0226\n\027un" +
-      "modified_table_schema\030\004 \001(\0132\025.hbase.pb.T" +
-      "ableSchema\"\215\001\n\024EnableTableStateData\022,\n\tu" +
-      "ser_info\030\001 \002(\0132\031.hbase.pb.UserInformatio" +
-      "n\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.TableNa" +
-      "me\022\036\n\026skip_table_state_check\030\003 \002(\010\"\216\001\n\025D",
-      "isableTableStateData\022,\n\tuser_info\030\001 \002(\0132" +
-      "\031.hbase.pb.UserInformation\022\'\n\ntable_name" +
-      "\030\002 \002(\0132\023.hbase.pb.TableName\022\036\n\026skip_tabl" +
-      "e_state_check\030\003 \002(\010\"u\n\037RestoreParentToCh" +
-      "ildRegionsPair\022\032\n\022parent_region_name\030\001 \002" +
-      "(\t\022\032\n\022child1_region_name\030\002 \002(\t\022\032\n\022child2" +
-      "_region_name\030\003 \002(\t\"\245\002\n\026CloneSnapshotStat" +
-      "eData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.User" +
-      "Information\022/\n\010snapshot\030\002 \002(\0132\035.hbase.pb" +
-      ".SnapshotDescription\022+\n\014table_schema\030\003 \002",
-      "(\0132\025.hbase.pb.TableSchema\022)\n\013region_info" +
-      "\030\004 \003(\0132\024.hbase.pb.RegionInfo\022T\n!parent_t" +
-      "o_child_regions_pair_list\030\005 \003(\0132).hbase." +
-      "pb.RestoreParentToChildRegionsPair\"\245\003\n\030R" +
-      "estoreSnapshotStateData\022,\n\tuser_info\030\001 \002" +
-      "(\0132\031.hbase.pb.UserInformation\022/\n\010snapsho" +
-      "t\030\002 \002(\0132\035.hbase.pb.SnapshotDescription\0224" +
-      "\n\025modified_table_schema\030\003 \002(\0132\025.hbase.pb" +
-      ".TableSchema\0225\n\027region_info_for_restore\030" +
-      "\004 \003(\0132\024.hbase.pb.RegionInfo\0224\n\026region_in",
-      "fo_for_remove\030\005 \003(\0132\024.hbase.pb.RegionInf" +
-      "o\0221\n\023region_info_for_add\030\006 \003(\0132\024.hbase.p" +
-      "b.RegionInfo\022T\n!parent_to_child_regions_" +
-      "pair_list\030\007 \003(\0132).hbase.pb.RestoreParent" +
-      "ToChildRegionsPair\"\300\001\n\032MergeTableRegions" +
-      "StateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb." +
-      "UserInformation\022)\n\013region_info\030\002 \003(\0132\024.h" +
-      "base.pb.RegionInfo\0220\n\022merged_region_info" +
-      "\030\003 \002(\0132\024.hbase.pb.RegionInfo\022\027\n\010forcible" +
-      "\030\004 \001(\010:\005false\"\254\001\n\031SplitTableRegionStateD",
-      "ata\022,\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserIn" +
-      "formation\0220\n\022parent_region_info\030\002 \002(\0132\024." +
-      "hbase.pb.RegionInfo\022/\n\021child_region_info" +
-      "\030\003 \003(\0132\024.hbase.pb.RegionInfo\"\201\002\n\024ServerC" +
-      "rashStateData\022)\n\013server_name\030\001 \002(\0132\024.hba" +
-      "se.pb.ServerName\022\036\n\026distributed_log_repl" +
-      "ay\030\002 \001(\010\0227\n\031regions_on_crashed_server\030\003 " +
-      "\003(\0132\024.hbase.pb.RegionInfo\022.\n\020regions_ass" +
-      "igned\030\004 \003(\0132\024.hbase.pb.RegionInfo\022\025\n\rcar" +
-      "rying_meta\030\005 \001(\010\022\036\n\020should_split_wal\030\006 \001",
-      "(\010:\004true*\330\001\n\020CreateTableState\022\036\n\032CREATE_" +
-      "TABLE_PRE_OPERATION\020\001\022 \n\034CREATE_TABLE_WR" +
-      "ITE_FS_LAYOUT\020\002\022\034\n\030CREATE_TABLE_ADD_TO_M" +
-      "ETA\020\003\022\037\n\033CREATE_TABLE_ASSIGN_REGIONS\020\004\022\"" +
-      "\n\036CREATE_TABLE_UPDATE_DESC_CACHE\020\005\022\037\n\033CR" +
-      "EATE_TABLE_POST_OPERATION\020\006*\207\002\n\020ModifyTa" +
-      "bleState\022\030\n\024MODIFY_TABLE_PREPARE\020\001\022\036\n\032MO" +
-      "DIFY_TABLE_PRE_OPERATION\020\002\022(\n$MODIFY_TAB" +
-      "LE_UPDATE_TABLE_DESCRIPTOR\020\003\022&\n\"MODIFY_T" +
-      "ABLE_REMOVE_REPLICA_COLUMN\020\004\022!\n\035MODIFY_T",
-      "ABLE_DELETE_FS_LAYOUT\020\005\022\037\n\033MODIFY_TABLE_" +
-      "POST_OPERATION\020\006\022#\n\037MODIFY_TABLE_REOPEN_" +
-      "ALL_REGIONS\020\007*\212\002\n\022TruncateTableState\022 \n\034" +
-      "TRUNCATE_TABLE_PRE_OPERATION\020\001\022#\n\037TRUNCA" +
-      "TE_TABLE_REMOVE_FROM_META\020\002\022\"\n\036TRUNCATE_" +
-      "TABLE_CLEAR_FS_LAYOUT\020\003\022#\n\037TRUNCATE_TABL" +
-      "E_CREATE_FS_LAYOUT\020\004\022\036\n\032TRUNCATE_TABLE_A" +
-      "DD_TO_META\020\005\022!\n\035TRUNCATE_TABLE_ASSIGN_RE" +
-      "GIONS\020\006\022!\n\035TRUNCATE_TABLE_POST_OPERATION" +
-      "\020\007*\337\001\n\020DeleteTableState\022\036\n\032DELETE_TABLE_",
-      "PRE_OPERATION\020\001\022!\n\035DELETE_TABLE_REMOVE_F" +
-      "ROM_META\020\002\022 \n\034DELETE_TABLE_CLEAR_FS_LAYO" +
-      "UT\020\003\022\"\n\036DELETE_TABLE_UPDATE_DESC_CACHE\020\004" +
-      "\022!\n\035DELETE_TABLE_UNASSIGN_REGIONS\020\005\022\037\n\033D" +
-      "ELETE_TABLE_POST_OPERATION\020\006*\320\001\n\024CreateN" +
-      "amespaceState\022\034\n\030CREATE_NAMESPACE_PREPAR" +
-      "E\020\001\022%\n!CREATE_NAMESPACE_CREATE_DIRECTORY" +
-      "\020\002\022)\n%CREATE_NAMESPACE_INSERT_INTO_NS_TA" +
-      "BLE\020\003\022\036\n\032CREATE_NAMESPACE_UPDATE_ZK\020\004\022(\n" +
-      "$CREATE_NAMESPACE_SET_NAMESPACE_QUOTA\020\005*",
-      "z\n\024ModifyNamespaceState\022\034\n\030MODIFY_NAMESP" +
-      "ACE_PREPARE\020\001\022$\n MODIFY_NAMESPACE_UPDATE" +
-      "_NS_TABLE\020\002\022\036\n\032MODIFY_NAMESPACE_UPDATE_Z" +
-      "K\020\003*\332\001\n\024DeleteNamespaceState\022\034\n\030DELETE_N" +
-      "AMESPACE_PREPARE\020\001\022)\n%DELETE_NAMESPACE_D" +
-      "ELETE_FROM_NS_TABLE\020\002\022#\n\037DELETE_NAMESPAC" +
-      "E_REMOVE_FROM_ZK\020\003\022\'\n#DELETE_NAMESPACE_D" +
-      "ELETE_DIRECTORIES\020\004\022+\n\'DELETE_NAMESPACE_" +
-      "REMOVE_NAMESPACE_QUOTA\020\005*\331\001\n\024AddColumnFa" +
-      "milyState\022\035\n\031ADD_COLUMN_FAMILY_PREPARE\020\001",
-      "\022#\n\037ADD_COLUMN_FAMILY_PRE_OPERATION\020\002\022-\n" +
-      ")ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPT" +
-      "OR\020\003\022$\n ADD_COLUMN_FAMILY_POST_OPERATION" +
-      "\020\004\022(\n$ADD_COLUMN_FAMILY_REOPEN_ALL_REGIO" +
-      "NS\020\005*\353\001\n\027ModifyColumnFamilyState\022 \n\034MODI" +
-      "FY_COLUMN_FAMILY_PREPARE\020\001\022&\n\"MODIFY_COL" +
-      "UMN_FAMILY_PRE_OPERATION\020\002\0220\n,MODIFY_COL" +
-      "UMN_FAMILY_UPDATE_TABLE_DESCRIPTOR\020\003\022\'\n#" +
-      "MODIFY_COLUMN_FAMILY_POST_OPERATION\020\004\022+\n" +
-      "\'MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS",
-      "\020\005*\226\002\n\027DeleteColumnFamilyState\022 \n\034DELETE" +
-      "_COLUMN_FAMILY_PREPARE\020\001\022&\n\"DELETE_COLUM" +
-      "N_FAMILY_PRE_OPERATION\020\002\0220\n,DELETE_COLUM" +
-      "N_FAMILY_UPDATE_TABLE_DESCRIPTOR\020\003\022)\n%DE" +
-      "LETE_COLUMN_FAMILY_DELETE_FS_LAYOUT\020\004\022\'\n" +
-      "#DELETE_COLUMN_FAMILY_POST_OPERATION\020\005\022+" +
-      "\n\'DELETE_COLUMN_FAMILY_REOPEN_ALL_REGION" +
-      "S\020\006*\350\001\n\020EnableTableState\022\030\n\024ENABLE_TABLE" +
-      "_PREPARE\020\001\022\036\n\032ENABLE_TABLE_PRE_OPERATION" +
-      "\020\002\022)\n%ENABLE_TABLE_SET_ENABLING_TABLE_ST",
-      "ATE\020\003\022$\n ENABLE_TABLE_MARK_REGIONS_ONLIN" +
-      "E\020\004\022(\n$ENABLE_TABLE_SET_ENABLED_TABLE_ST" +
-      "ATE\020\005\022\037\n\033ENABLE_TABLE_POST_OPERATION\020\006*\362" +
-      "\001\n\021DisableTableState\022\031\n\025DISABLE_TABLE_PR" +
-      "EPARE\020\001\022\037\n\033DISABLE_TABLE_PRE_OPERATION\020\002" +
-      "\022+\n\'DISABLE_TABLE_SET_DISABLING_TABLE_ST" +
-      "ATE\020\003\022&\n\"DISABLE_TABLE_MARK_REGIONS_OFFL" +
-      "INE\020\004\022*\n&DISABLE_TABLE_SET_DISABLED_TABL" +
-      "E_STATE\020\005\022 \n\034DISABLE_TABLE_POST_OPERATIO" +
-      "N\020\006*\346\001\n\022CloneSnapshotState\022 \n\034CLONE_SNAP",
-      "SHOT_PRE_OPERATION\020\001\022\"\n\036CLONE_SNAPSHOT_W" +
-      "RITE_FS_LAYOUT\020\002\022\036\n\032CLONE_SNAPSHOT_ADD_T" +
-      "O_META\020\003\022!\n\035CLONE_SNAPSHOT_ASSIGN_REGION" +
-      "S\020\004\022$\n CLONE_SNAPSHOT_UPDATE_DESC_CACHE\020" +
-      "\005\022!\n\035CLONE_SNAPSHOT_POST_OPERATION\020\006*\260\001\n" +
-      "\024RestoreSnapshotState\022\"\n\036RESTORE_SNAPSHO" +
-      "T_PRE_OPERATION\020\001\022,\n(RESTORE_SNAPSHOT_UP" +
-      "DATE_TABLE_DESCRIPTOR\020\002\022$\n RESTORE_SNAPS" +
-      "HOT_WRITE_FS_LAYOUT\020\003\022 \n\034RESTORE_SNAPSHO" +
-      "T_UPDATE_META\020\004*\376\003\n\026MergeTableRegionsSta",
-      "te\022\037\n\033MERGE_TABLE_REGIONS_PREPARE\020\001\022.\n*M" +
-      "ERGE_TABLE_REGIONS_MOVE_REGION_TO_SAME_R" +
-      "S\020\002\022+\n\'MERGE_TABLE_REGIONS_PRE_MERGE_OPE" +
-      "RATION\020\003\022/\n+MERGE_TABLE_REGIONS_SET_MERG" +
-      "ING_TABLE_STATE\020\004\022%\n!MERGE_TABLE_REGIONS" +
-      "_CLOSE_REGIONS\020\005\022,\n(MERGE_TABLE_REGIONS_" +
-      "CREATE_MERGED_REGION\020\006\0222\n.MERGE_TABLE_RE" +
-      "GIONS_PRE_MERGE_COMMIT_OPERATION\020\007\022#\n\037ME" +
-      "RGE_TABLE_REGIONS_UPDATE_META\020\010\0223\n/MERGE" +
-      "_TABLE_REGIONS_POST_MERGE_COMMIT_OPERATI",
-      "ON\020\t\022*\n&MERGE_TABLE_REGIONS_OPEN_MERGED_" +
-      "REGION\020\n\022&\n\"MERGE_TABLE_REGIONS_POST_OPE" +
-      "RATION\020\013*\304\003\n\025SplitTableRegionState\022\036\n\032SP" +
-      "LIT_TABLE_REGION_PREPARE\020\001\022$\n SPLIT_TABL" +
-      "E_REGION_PRE_OPERATION\020\002\0220\n,SPLIT_TABLE_" +
-      "REGION_SET_SPLITTING_TABLE_STATE\020\003\022*\n&SP" +
-      "LIT_TABLE_REGION_CLOSE_PARENT_REGION\020\004\022." +
-      "\n*SPLIT_TABLE_REGION_CREATE_DAUGHTER_REG" +
-      "IONS\020\005\0220\n,SPLIT_TABLE_REGION_PRE_OPERATI" +
-      "ON_BEFORE_PONR\020\006\022\"\n\036SPLIT_TABLE_REGION_U",
-      "PDATE_META\020\007\022/\n+SPLIT_TABLE_REGION_PRE_O" +
-      "PERATION_AFTER_PONR\020\010\022)\n%SPLIT_TABLE_REG" +
-      "ION_OPEN_CHILD_REGIONS\020\t\022%\n!SPLIT_TABLE_" +
-      "REGION_POST_OPERATION\020\n*\234\002\n\020ServerCrashS" +
-      "tate\022\026\n\022SERVER_CRASH_START\020\001\022\035\n\031SERVER_C" +
-      "RASH_PROCESS_META\020\002\022\034\n\030SERVER_CRASH_GET_" +
-      "REGIONS\020\003\022\036\n\032SERVER_CRASH_NO_SPLIT_LOGS\020" +
-      "\004\022\033\n\027SERVER_CRASH_SPLIT_LOGS\020\005\022#\n\037SERVER" +
-      "_CRASH_PREPARE_LOG_REPLAY\020\006\022\027\n\023SERVER_CR" +
-      "ASH_ASSIGN\020\010\022\037\n\033SERVER_CRASH_WAIT_ON_ASS",
-      "IGN\020\t\022\027\n\023SERVER_CRASH_FINISH\020dBR\n1org.ap" +
-      "ache.hadoop.hbase.shaded.protobuf.genera" +
-      "tedB\025MasterProcedureProtosH\001\210\001\001\240\001\001"
+      "mation\022\027\n\017preserve_splits\030\002 \002(\010\022\'\n\ntable" +
+      "_name\030\003 \001(\0132\023.hbase.pb.TableName\022+\n\014tabl" +
+      "e_schema\030\004 \001(\0132\025.hbase.pb.TableSchema\022)\n" +
+      "\013region_info\030\005 \003(\0132\024.hbase.pb.RegionInfo" +
+      "\"\230\001\n\024DeleteTableStateData\022,\n\tuser_info\030\001" +
+      " \002(\0132\031.hbase.pb.UserInformation\022\'\n\ntable" +
+      "_name\030\002 \002(\0132\023.hbase.pb.TableName\022)\n\013regi",
+      "on_info\030\003 \003(\0132\024.hbase.pb.RegionInfo\"W\n\030C" +
+      "reateNamespaceStateData\022;\n\024namespace_des" +
+      "criptor\030\001 \002(\0132\035.hbase.pb.NamespaceDescri" +
+      "ptor\"\237\001\n\030ModifyNamespaceStateData\022;\n\024nam" +
+      "espace_descriptor\030\001 \002(\0132\035.hbase.pb.Names" +
+      "paceDescriptor\022F\n\037unmodified_namespace_d" +
+      "escriptor\030\002 \001(\0132\035.hbase.pb.NamespaceDesc" +
+      "riptor\"o\n\030DeleteNamespaceStateData\022\026\n\016na" +
+      "mespace_name\030\001 \002(\t\022;\n\024namespace_descript" +
+      "or\030\002 \001(\0132\035.hbase.pb.NamespaceDescriptor\"",
+      "\344\001\n\030AddColumnFamilyStateData\022,\n\tuser_inf" +
+      "o\030\001 \002(\0132\031.hbase.pb.UserInformation\022\'\n\nta" +
+      "ble_name\030\002 \002(\0132\023.hbase.pb.TableName\0229\n\023c" +
+      "olumnfamily_schema\030\003 \002(\0132\034.hbase.pb.Colu" +
+      "mnFamilySchema\0226\n\027unmodified_table_schem" +
+      "a\030\004 \001(\0132\025.hbase.pb.TableSchema\"\347\001\n\033Modif" +
+      "yColumnFamilyStateData\022,\n\tuser_info\030\001 \002(" +
+      "\0132\031.hbase.pb.UserInformation\022\'\n\ntable_na" +
+      "me\030\002 \002(\0132\023.hbase.pb.TableName\0229\n\023columnf" +
+      "amily_schema\030\003 \002(\0132\034.hbase.pb.ColumnFami",
+      "lySchema\0226\n\027unmodified_table_schema\030\004 \001(" +
+      "\0132\025.hbase.pb.TableSchema\"\307\001\n\033DeleteColum" +
+      "nFamilyStateData\022,\n\tuser_info\030\001 \002(\0132\031.hb" +
+      "ase.pb.UserInformation\022\'\n\ntable_name\030\002 \002" +
+      "(\0132\023.hbase.pb.TableName\022\031\n\021columnfamily_" +
+      "name\030\003 \002(\014\0226\n\027unmodified_table_schema\030\004 " +
+      "\001(\0132\025.hbase.pb.TableSchema\"\215\001\n\024EnableTab" +
+      "leStateData\022,\n\tuser_info\030\001 \002(\0132\031.hbase.p" +
+      "b.UserInformation\022\'\n\ntable_name\030\002 \002(\0132\023." +
+      "hbase.pb.TableName\022\036\n\026skip_table_state_c",
+      "heck\030\003 \002(\010\"\216\001\n\025DisableTableStateData\022,\n\t" +
+      "user_info\030\001 \002(\0132\031.hbase.pb.UserInformati" +
+      "on\022\'\n\ntable_name\030\002 \002(\0132\023.hbase.pb.TableN" +
+      "ame\022\036\n\026skip_table_state_check\030\003 \002(\010\"u\n\037R" +
+      "estoreParentToChildRegionsPair\022\032\n\022parent" +
+      "_region_name\030\001 \002(\t\022\032\n\022child1_region_name" +
+      "\030\002 \002(\t\022\032\n\022child2_region_name\030\003 \002(\t\"\245\002\n\026C" +
+      "loneSnapshotStateData\022,\n\tuser_info\030\001 \002(\013" +
+      "2\031.hbase.pb.UserInformation\022/\n\010snapshot\030" +
+      "\002 \002(\0132\035.hbase.pb.SnapshotDescription\022+\n\014",
+      "table_schema\030\003 \002(\0132\025.hbase.pb.TableSchem" +
+      "a\022)\n\013region_info\030\004 \003(\0132\024.hbase.pb.Region" +
+      "Info\022T\n!parent_to_child_regions_pair_lis" +
+      "t\030\005 \003(\0132).hbase.pb.RestoreParentToChildR" +
+      "egionsPair\"\245\003\n\030RestoreSnapshotStateData\022" +
+      ",\n\tuser_info\030\001 \002(\0132\031.hbase.pb.UserInform" +
+      "ation\022/\n\010snapshot\030\002 \002(\0132\035.hbase.pb.Snaps" +
+      "hotDescription\0224\n\025modified_table_schema\030" +
+      "\003 \002(\0132\025.hbase.pb.TableSchema\0225\n\027region_i" +
+      "nfo_for_restore\030\004 \003(\0132\024.hbase.pb.RegionI",
+      "nfo\0224\n\026region_info_for_remove\030\005 \003(\0132\024.hb" +
+      "ase.pb.RegionInfo\0221\n\023region_info_for_add" +
+      "\030\006 \003(\0132\024.hbase.pb.RegionInfo\022T\n!parent_t" +
+      "o_child_regions_pair_list\030\007 \003(\0132).hbase." +
+      "pb.RestoreParentToChildRegionsPair\"\300\001\n\032M" +
+      "ergeTableRegionsStateData\022,\n\tuser_info\030\001" +
+      " \002(\0132\031.hbase.pb.UserInformation\022)\n\013regio" +
+      "n_info\030\002 \003(\0132\024.hbase.pb.RegionInfo\0220\n\022me" +
+      "rged_region_info\030\003 \002(\0132\024.hbase.pb.Region" +
+      "Info\022\027\n\010forcible\030\004 \001(\010:\005false\"\254\001\n\031SplitT",
+      "ableRegionStateData\022,\n\tuser_info\030\001 \002(\0132\031" +
+      ".hbase.pb.UserInformation\0220\n\022parent_regi" +
+      "on_info\030\002 \002(\0132\024.hbase.pb.RegionInfo\022/\n\021c" +
+      "hild_region_info\030\003 \003(\0132\024.hbase.pb.Region" +
+      "Info\"\201\002\n\024ServerCrashStateData\022)\n\013server_" +
+      "name\030\001 \002(\0132\024.hbase.pb.ServerName\022\036\n\026dist" +
+      "ributed_log_replay\030\002 \001(\010\0227\n\031regions_on_c" +
+      "rashed_server\030\003 \003(\0132\024.hbase.pb.RegionInf" +
+      "o\022.\n\020regions_assigned\030\004 \003(\0132\024.hbase.pb.R" +
+      "egionInfo\022\025\n\rcarrying_meta\030\005 \001(\010\022\036\n\020shou",
+      "ld_split_wal\030\006 \001(\010:\004true*\330\001\n\020CreateTable" +
+      "State\022\036\n\032CREATE_TABLE_PRE_OPERATION\020\001\022 \n" +
+      "\034CREATE_TABLE_WRITE_FS_LAYOUT\020\002\022\034\n\030CREAT" +
+      "E_TABLE_ADD_TO_META\020\003\022\037\n\033CREATE_TABLE_AS" +
+      "SIGN_REGIONS\020\004\022\"\n\036CREATE_TABLE_UPDATE_DE" +
+      "SC_CACHE\020\005\022\037\n\033CREATE_TABLE_POST_OPERATIO" +
+      "N\020\006*\207\002\n\020ModifyTableState\022\030\n\024MODIFY_TABLE" +
+      "_PREPARE\020\001\022\036\n\032MODIFY_TABLE_PRE_OPERATION" +
+      "\020\002\022(\n$MODIFY_TABLE_UPDATE_TABLE_DESCRIPT" +
+      "OR\020\003\022&\n\"MODIFY_TABLE_REMOVE_REPLICA_COLU",
+      "MN\020\004\022!\n\035MODIFY_TABLE_DELETE_FS_LAYOUT\020\005\022" +
+      "\037\n\033MODIFY_TABLE_POST_OPERATION\020\006\022#\n\037MODI" +
+      "FY_TABLE_REOPEN_ALL_REGIONS\020\007*\212\002\n\022Trunca" +
+      "teTableState\022 \n\034TRUNCATE_TABLE_PRE_OPERA" +
+      "TION\020\001\022#\n\037TRUNCATE_TABLE_REMOVE_FROM_MET" +
+      "A\020\002\022\"\n\036TRUNCATE_TABLE_CLEAR_FS_LAYOUT\020\003\022" +
+      "#\n\037TRUNCATE_TABLE_CREATE_FS_LAYOUT\020\004\022\036\n\032" +
+      "TRUNCATE_TABLE_ADD_TO_META\020\005\022!\n\035TRUNCATE" +
+      "_TABLE_ASSIGN_REGIONS\020\006\022!\n\035TRUNCATE_TABL" +
+      "E_POST_OPERATION\020\007*\337\001\n\020DeleteTableState\022",
+      "\036\n\032DELETE_TABLE_PRE_OPERATION\020\001\022!\n\035DELET" +
+      "E_TABLE_REMOVE_FROM_META\020\002\022 \n\034DELETE_TAB" +
+      "LE_CLEAR_FS_LAYOUT\020\003\022\"\n\036DELETE_TABLE_UPD" +
+      "ATE_DESC_CACHE\020\004\022!\n\035DELETE_TABLE_UNASSIG" +
+      "N_REGIONS\020\005\022\037\n\033DELETE_TABLE_POST_OPERATI" +
+      "ON\020\006*\320\001\n\024CreateNamespaceState\022\034\n\030CREATE_" +
+      "NAMESPACE_PREPARE\020\001\022%\n!CREATE_NAMESPACE_" +
+      "CREATE_DIRECTORY\020\002\022)\n%CREATE_NAMESPACE_I" +
+      "NSERT_INTO_NS_TABLE\020\003\022\036\n\032CREATE_NAMESPAC" +
+      "E_UPDATE_ZK\020\004\022(\n$CREATE_NAMESPACE_SET_NA",
+      "MESPACE_QUOTA\020\005*z\n\024ModifyNamespaceState\022" +
+      "\034\n\030MODIFY_NAMESPACE_PREPARE\020\001\022$\n MODIFY_" +
+      "NAMESPACE_UPDATE_NS_TABLE\020\002\022\036\n\032MODIFY_NA" +
+      "MESPACE_UPDATE_ZK\020\003*\332\001\n\024DeleteNamespaceS" +
+      "tate\022\034\n\030DELETE_NAMESPACE_PREPARE\020\001\022)\n%DE" +
+      "LETE_NAMESPACE_DELETE_FROM_NS_TABLE\020\002\022#\n" +
+      "\037DELETE_NAMESPACE_REMOVE_FROM_ZK\020\003\022\'\n#DE" +
+      "LETE_NAMESPACE_DELETE_DIRECTORIES\020\004\022+\n\'D" +
+      "ELETE_NAMESPACE_REMOVE_NAMESPACE_QUOTA\020\005" +
+      "*\331\001\n\024AddColumnFamilyState\022\035\n\031ADD_COLUMN_",
+      "FAMILY_PREPARE\020\001\022#\n\037ADD_COLUMN_FAMILY_PR" +
+      "E_OPERATION\020\002\022-\n)ADD_COLUMN_FAMILY_UPDAT" +
+      "E_TABLE_DESCRIPTOR\020\003\022$\n ADD_COLUMN_FAMIL" +
+      "Y_POST_OPERATION\020\004\022(\n$ADD_COLUMN_FAMILY_" +
+      "REOPEN_ALL_REGIONS\020\005*\353\001\n\027ModifyColumnFam" +
+      "ilyState\022 \n\034MODIFY_COLUMN_FAMILY_PREPARE" +
+      "\020\001\022&\n\"MODIFY_COLUMN_FAMILY_PRE_OPERATION" +
+      "\020\002\0220\n,MODIFY_COLUMN_FAMILY_UPDATE_TABLE_" +
+      "DESCRIPTOR\020\003\022\'\n#MODIFY_COLUMN_FAMILY_POS" +
+      "T_OPERATION\020\004\022+\n\'MODIFY_COLUMN_FAMILY_RE",
+      "OPEN_ALL_REGIONS\020\005*\226\002\n\027DeleteColumnFamil" +
+      "yState\022 \n\034DELETE_COLUMN_FAMILY_PREPARE\020\001" +
+      "\022&\n\"DELETE_COLUMN_FAMILY_PRE_OPERATION\020\002" +
+      "\0220\n,DELETE_COLUMN_FAMILY_UPDATE_TABLE_DE" +
+      "SCRIPTOR\020\003\022)\n%DELETE_COLUMN_FAMILY_DELET" +
+      "E_FS_LAYOUT\020\004\022\'\n#DELETE_COLUMN_FAMILY_PO" +
+      "ST_OPERATION\020\005\022+\n\'DELETE_COLUMN_FAMILY_R" +
+      "EOPEN_ALL_REGIONS\020\006*\350\001\n\020EnableTableState" +
+      "\022\030\n\024ENABLE_TABLE_PREPARE\020\001\022\036\n\032ENABLE_TAB" +
+      "LE_PRE_OPERATION\020\002\022)\n%ENABLE_TABLE_SET_E",
+      "NABLING_TABLE_STATE\020\003\022$\n ENABLE_TABLE_MA" +
+      "RK_REGIONS_ONLINE\020\004\022(\n$ENABLE_TABLE_SET_" +
+      "ENABLED_TABLE_STATE\020\005\022\037\n\033ENABLE_TABLE_PO" +
+      "ST_OPERATION\020\006*\362\001\n\021DisableTableState\022\031\n\025" +
+      "DISABLE_TABLE_PREPARE\020\001\022\037\n\033DISABLE_TABLE" +
+      "_PRE_OPERATION\020\002\022+\n\'DISABLE_TABLE_SET_DI" +
+      "SABLING_TABLE_STATE\020\003\022&\n\"DISABLE_TABLE_M" +
+      "ARK_REGIONS_OFFLINE\020\004\022*\n&DISABLE_TABLE_S" +
+      "ET_DISABLED_TABLE_STATE\020\005\022 \n\034DISABLE_TAB" +
+      "LE_POST_OPERATION\020\006*\206\002\n\022CloneSnapshotSta",
+      "te\022 \n\034CLONE_SNAPSHOT_PRE_OPERATION\020\001\022\"\n\036" +
+      "CLONE_SNAPSHOT_WRITE_FS_LAYOUT\020\002\022\036\n\032CLON" +
+      "E_SNAPSHOT_ADD_TO_META\020\003\022!\n\035CLONE_SNAPSH" +
+      "OT_ASSIGN_REGIONS\020\004\022$\n CLONE_SNAPSHOT_UP" +
+      "DATE_DESC_CACHE\020\005\022!\n\035CLONE_SNAPSHOT_POST" +
+      "_OPERATION\020\006\022\036\n\032CLONE_SNAPHOST_RESTORE_A" +
+      "CL\020\007*\322\001\n\024RestoreSnapshotState\022\"\n\036RESTORE" +
+      "_SNAPSHOT_PRE_OPERATION\020\001\022,\n(RESTORE_SNA" +
+      "PSHOT_UPDATE_TABLE_DESCRIPTOR\020\002\022$\n RESTO" +
+      "RE_SNAPSHOT_WRITE_FS_LAYOUT\020\003\022 \n\034RESTORE",
+      "_SNAPSHOT_UPDATE_META\020\004\022 \n\034RESTORE_SNAPS" +
+      "HOT_RESTORE_ACL\020\005*\376\003\n\026MergeTableRegionsS" +
+      "tate\022\037\n\033MERGE_TABLE_REGIONS_PREPARE\020\001\022.\n" +
+      "*MERGE_TABLE_REGIONS_MOVE_REGION_TO_SAME" +
+      "_RS\020\002\022+\n\'MERGE_TABLE_REGIONS_PRE_MERGE_O" +
+      "PERATION\020\003\022/\n+MERGE_TABLE_REGIONS_SET_ME" +
+      "RGING_TABLE_STATE\020\004\022%\n!MERGE_TABLE_REGIO" +
+      "NS_CLOSE_REGIONS\020\005\022,\n(MERGE_TABLE_REGION" +
+      "S_CREATE_MERGED_REGION\020\006\0222\n.MERGE_TABLE_" +
+      "REGIONS_PRE_MERGE_COMMIT_OPERATION\020\007\022#\n\037",
+      "MERGE_TABLE_REGIONS_UPDATE_META\020\010\0223\n/MER" +
+      "GE_TABLE_REGIONS_POST_MERGE_COMMIT_OPERA" +
+      "TION\020\t\022*\n&MERGE_TABLE_REGIONS_OPEN_MERGE" +
+      "D_REGION\020\n\022&\n\"MERGE_TABLE_REGIONS_POST_O" +
+      "PERATION\020\013*\304\003\n\025SplitTableRegionState\022\036\n\032" +
+      "SPLIT_TABLE_REGION_PREPARE\020\001\022$\n SPLIT_TA" +
+      "BLE_REGION_PRE_OPERATION\020\002\0220\n,SPLIT_TABL" +
+      "E_REGION_SET_SPLITTING_TABLE_STATE\020\003\022*\n&" +
+      "SPLIT_TABLE_REGION_CLOSE_PARENT_REGION\020\004" +
+      "\022.\n*SPLIT_TABLE_REGION_CREATE_DAUGHTER_R",
+      "EGIONS\020\005\0220\n,SPLIT_TABLE_REGION_PRE_OPERA" +
+      "TION_BEFORE_PONR\020\006\022\"\n\036SPLIT_TABLE_REGION" +
+      "_UPDATE_META\020\007\022/\n+SPLIT_TABLE_REGION_PRE" +
+      "_OPERATION_AFTER_PONR\020\010\022)\n%SPLIT_TABLE_R" +
+      "EGION_OPEN_CHILD_REGIONS\020\t\022%\n!SPLIT_TABL" +
+      "E_REGION_POST_OPERATION\020\n*\234\002\n\020ServerCras" +
+      "hState\022\026\n\022SERVER_CRASH_START\020\001\022\035\n\031SERVER" +
+      "_CRASH_PROCESS_META\020\002\022\034\n\030SERVER_CRASH_GE" +
+      "T_REGIONS\020\003\022\036\n\032SERVER_CRASH_NO_SPLIT_LOG" +
+      "S\020\004\022\033\n\027SERVER_CRASH_SPLIT_LOGS\020\005\022#\n\037SERV",
+      "ER_CRASH_PREPARE_LOG_REPLAY\020\006\022\027\n\023SERVER_" +
+      "CRASH_ASSIGN\020\010\022\037\n\033SERVER_CRASH_WAIT_ON_A" +
+      "SSIGN\020\t\022\027\n\023SERVER_CRASH_FINISH\020dBR\n1org." +
+      "apache.hadoop.hbase.shaded.protobuf.gene" +
+      "ratedB\025MasterProcedureProtosH\001\210\001\001\240\001\001"
     };
     org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
         new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
@@ -24274,6 +24294,7 @@ public final class MasterProcedureProtos {
         new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.getDescriptor(),
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.getDescriptor(),
         }, assigner);
     internal_static_hbase_pb_CreateTableStateData_descriptor =
       getDescriptor().getMessageTypes().get(0);
@@ -24385,6 +24406,7 @@ public final class MasterProcedureProtos {
         new java.lang.String[] { "ServerName", "DistributedLogReplay", "RegionsOnCrashedServer", "RegionsAssigned", "CarryingMeta", "ShouldSplitWal", });
     org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.getDescriptor();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)


[4/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
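For context, the hunks in this part move SnapshotDescription out of HBaseProtos into SnapshotProtos and add an optional restoreACL flag (default false) to the snapshot request messages. A minimal client-side sketch follows, with two stated assumptions: the hunk near the end of this part is taken to belong to RestoreSnapshotRequest (the enclosing class name is not visible in the excerpt), and setRestoreACL(...) is assumed from the standard protobuf builder naming that accompanies the generated hasRestoreACL()/getRestoreACL() shown below.

    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class RestoreAclSketch {
      public static void main(String[] args) {
        // SnapshotDescription now lives in SnapshotProtos rather than HBaseProtos,
        // which is what most of the mechanical renames in this patch reflect.
        SnapshotDescription snapshot = SnapshotDescription.newBuilder()
            .setName("my_snapshot")   // required field of SnapshotDescription
            .build();
        RestoreSnapshotRequest request = RestoreSnapshotRequest.newBuilder()
            .setSnapshot(snapshot)    // required .hbase.pb.SnapshotDescription snapshot = 1
            .setRestoreACL(true)      // assumed setter for: optional bool restoreACL = 4 [default = false]
            .build();
        System.out.println(request.hasRestoreACL() && request.getRestoreACL()); // prints: true
      }
    }

Leaving restoreACL unset preserves the previous behaviour, since the field defaults to false.
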
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
index e4ce4cb..3560de0 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
@@ -41703,11 +41703,11 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
    * Protobuf type {@code hbase.pb.SnapshotRequest}
@@ -41752,11 +41752,11 @@ public final class MasterProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -41790,7 +41790,7 @@ public final class MasterProtos {
 
     private int bitField0_;
     public static final int SNAPSHOT_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
@@ -41800,14 +41800,14 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -42118,9 +42118,9 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
@@ -42130,9 +42130,9 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -42140,7 +42140,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -42157,7 +42157,7 @@ public final class MasterProtos {
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -42170,13 +42170,13 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -42203,7 +42203,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -42211,23 +42211,23 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -43138,12 +43138,12 @@ public final class MasterProtos {
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> 
+    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription>
         getSnapshotsList();
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index);
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index);
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
@@ -43151,12 +43151,12 @@ public final class MasterProtos {
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
         getSnapshotsOrBuilderList();
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
         int index);
   }
   /**
@@ -43204,11 +43204,11 @@ public final class MasterProtos {
             }
             case 10: {
               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription>();
+                snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription>();
                 mutable_bitField0_ |= 0x00000001;
               }
               snapshots_.add(
-                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry));
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry));
               break;
             }
           }
@@ -43239,17 +43239,17 @@ public final class MasterProtos {
     }
 
     public static final int SNAPSHOTS_FIELD_NUMBER = 1;
-    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_;
+    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription> snapshots_;
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() {
+    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription> getSnapshotsList() {
       return snapshots_;
     }
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+    public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
         getSnapshotsOrBuilderList() {
       return snapshots_;
     }
@@ -43262,13 +43262,13 @@ public final class MasterProtos {
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) {
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index) {
       return snapshots_.get(index);
     }
     /**
      * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
         int index) {
       return snapshots_.get(index);
     }
@@ -43597,22 +43597,22 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_ =
+      private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription> snapshots_ =
         java.util.Collections.emptyList();
       private void ensureSnapshotsIsMutable() {
         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription>(snapshots_);
+          snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription>(snapshots_);
           bitField0_ |= 0x00000001;
          }
       }
 
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_;
 
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() {
+      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription> getSnapshotsList() {
         if (snapshotsBuilder_ == null) {
           return java.util.Collections.unmodifiableList(snapshots_);
         } else {
@@ -43632,7 +43632,7 @@ public final class MasterProtos {
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshots(int index) {
         if (snapshotsBuilder_ == null) {
           return snapshots_.get(index);
         } else {
@@ -43643,7 +43643,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder setSnapshots(
-          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotsBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -43660,7 +43660,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder setSnapshots(
-          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotsBuilder_ == null) {
           ensureSnapshotsIsMutable();
           snapshots_.set(index, builderForValue.build());
@@ -43673,7 +43673,7 @@ public final class MasterProtos {
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public Builder addSnapshots(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder addSnapshots(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotsBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -43690,7 +43690,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder addSnapshots(
-          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotsBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -43707,7 +43707,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder addSnapshots(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotsBuilder_ == null) {
           ensureSnapshotsIsMutable();
           snapshots_.add(builderForValue.build());
@@ -43721,7 +43721,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder addSnapshots(
-          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          int index, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotsBuilder_ == null) {
           ensureSnapshotsIsMutable();
           snapshots_.add(index, builderForValue.build());
@@ -43735,7 +43735,7 @@ public final class MasterProtos {
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
       public Builder addAllSnapshots(
-          java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription> values) {
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription> values) {
         if (snapshotsBuilder_ == null) {
           ensureSnapshotsIsMutable();
           org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
@@ -43775,14 +43775,14 @@ public final class MasterProtos {
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotsBuilder(
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotsBuilder(
           int index) {
         return getSnapshotsFieldBuilder().getBuilder(index);
       }
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
           int index) {
         if (snapshotsBuilder_ == null) {
           return snapshots_.get(index);  } else {
@@ -43792,7 +43792,7 @@ public final class MasterProtos {
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+      public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
            getSnapshotsOrBuilderList() {
         if (snapshotsBuilder_ != null) {
           return snapshotsBuilder_.getMessageOrBuilderList();
@@ -43803,31 +43803,31 @@ public final class MasterProtos {
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder addSnapshotsBuilder() {
         return getSnapshotsFieldBuilder().addBuilder(
-            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance());
+            org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance());
       }
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder(
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder addSnapshotsBuilder(
           int index) {
         return getSnapshotsFieldBuilder().addBuilder(
-            index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance());
+            index, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance());
       }
       /**
        * <code>repeated .hbase.pb.SnapshotDescription snapshots = 1;</code>
        */
-      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder> 
+      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder>
            getSnapshotsBuilderList() {
         return getSnapshotsFieldBuilder().getBuilderList();
       }
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotsFieldBuilder() {
         if (snapshotsBuilder_ == null) {
           snapshotsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   snapshots_,
                   ((bitField0_ & 0x00000001) == 0x00000001),
                   getParentForChildren(),
@@ -43896,11 +43896,11 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
    * Protobuf type {@code hbase.pb.DeleteSnapshotRequest}
@@ -43945,11 +43945,11 @@ public final class MasterProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -43983,7 +43983,7 @@ public final class MasterProtos {
 
     private int bitField0_;
     public static final int SNAPSHOT_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
@@ -43993,14 +43993,14 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -44311,9 +44311,9 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
@@ -44323,9 +44323,9 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -44333,7 +44333,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -44350,7 +44350,7 @@ public final class MasterProtos {
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -44363,13 +44363,13 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -44396,7 +44396,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -44404,23 +44404,23 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -44861,11 +44861,11 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
 
     /**
      * <code>optional uint64 nonce_group = 2 [default = 0];</code>
@@ -44884,6 +44884,15 @@ public final class MasterProtos {
      * <code>optional uint64 nonce = 3 [default = 0];</code>
      */
     long getNonce();
+
+    /**
+     * <code>optional bool restoreACL = 4 [default = false];</code>
+     */
+    boolean hasRestoreACL();
+    /**
+     * <code>optional bool restoreACL = 4 [default = false];</code>
+     */
+    boolean getRestoreACL();
   }
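
The hunks above add an optional restoreACL flag (field 4) to RestoreSnapshotRequest. As a minimal sketch of how a caller might populate it through the generated builders shown in this diff (only the message and method names come from the patch; the snapshot name is hypothetical, and this assumes SnapshotProtos.SnapshotDescription keeps the same name field as the HBaseProtos version it replaces):

    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

    MasterProtos.RestoreSnapshotRequest request =
        MasterProtos.RestoreSnapshotRequest.newBuilder()
            .setSnapshot(SnapshotProtos.SnapshotDescription.newBuilder()
                .setName("example_snapshot")   // hypothetical name, not from the patch
                .build())
            .setRestoreACL(true)               // ask the master to re-apply retained ACLs
            .build();

Because the field is optional with a false default, requests built without setRestoreACL(true) behave exactly as before the patch.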
   /**
    * Protobuf type {@code hbase.pb.RestoreSnapshotRequest}
@@ -44899,6 +44908,7 @@ public final class MasterProtos {
     private RestoreSnapshotRequest() {
       nonceGroup_ = 0L;
       nonce_ = 0L;
+      restoreACL_ = false;
     }
 
     @java.lang.Override
@@ -44930,11 +44940,11 @@ public final class MasterProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -44952,6 +44962,11 @@ public final class MasterProtos {
               nonce_ = input.readUInt64();
               break;
             }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              restoreACL_ = input.readBool();
+              break;
+            }
           }
         }
       } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
@@ -44978,7 +44993,7 @@ public final class MasterProtos {
 
     private int bitField0_;
     public static final int SNAPSHOT_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
@@ -44988,14 +45003,14 @@ public final class MasterProtos {
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     public static final int NONCE_GROUP_FIELD_NUMBER = 2;
@@ -45028,6 +45043,21 @@ public final class MasterProtos {
       return nonce_;
     }
 
+    public static final int RESTOREACL_FIELD_NUMBER = 4;
+    private boolean restoreACL_;
+    /**
+     * <code>optional bool restoreACL = 4 [default = false];</code>
+     */
+    public boolean hasRestoreACL() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional bool restoreACL = 4 [default = false];</code>
+     */
+    public boolean getRestoreACL() {
+      return restoreACL_;
+    }
+
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
@@ -45057,6 +45087,9 @@ public final class MasterProtos {
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         output.writeUInt64(3, nonce_);
       }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBool(4, restoreACL_);
+      }
       unknownFields.writeTo(output);
     }
 
@@ -45077,6 +45110,10 @@ public final class MasterProtos {
         size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
           .computeUInt64Size(3, nonce_);
       }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeBoolSize(4, restoreACL_);
+      }
       size += unknownFields.getSerializedSize();
       memoizedSize = size;
       return size;
@@ -45109,6 +45146,11 @@ public final class MasterProtos {
         result = result && (getNonce()
             == other.getNonce());
       }
+      result = result && (hasRestoreACL() == other.hasRestoreACL());
+      if (hasRestoreACL()) {
+        result = result && (getRestoreACL()
+            == other.getRestoreACL());
+      }
       result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
@@ -45134,6 +45176,11 @@ public final class MasterProtos {
         hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
             getNonce());
       }
+      if (hasRestoreACL()) {
+        hash = (37 * hash) + RESTOREACL_FIELD_NUMBER;
+        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
+            getRestoreACL());
+      }
       hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -45263,6 +45310,8 @@ public final class MasterProtos {
         bitField0_ = (bitField0_ & ~0x00000002);
         nonce_ = 0L;
         bitField0_ = (bitField0_ & ~0x00000004);
+        restoreACL_ = false;
+        bitField0_ = (bitField0_ & ~0x00000008);
         return this;
       }
 
@@ -45303,6 +45352,10 @@ public final class MasterProtos {
           to_bitField0_ |= 0x00000004;
         }
         result.nonce_ = nonce_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.restoreACL_ = restoreACL_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -45354,6 +45407,9 @@ public final class MasterProtos {
         if (other.hasNonce()) {
           setNonce(other.getNonce());
         }
+        if (other.hasRestoreACL()) {
+          setRestoreACL(other.getRestoreACL());
+        }
         this.mergeUnknownFields(other.unknownFields);
         onChanged();
         return this;
@@ -45388,9 +45444,9 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
@@ -45400,9 +45456,9 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -45410,7 +45466,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -45427,7 +45483,7 @@ public final class MasterProtos {
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -45440,13 +45496,13 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -45473,7 +45529,7 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -45481,23 +45537,23 @@ public final class MasterProtos {
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>required .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -45569,6 +45625,38 @@ public final class MasterProtos {
         onChanged();
         return this;
       }
+
+      private boolean restoreACL_ ;
+      /**
+       * <code>optional bool restoreACL = 4 [default = false];</code>
+       */
+      public boolean hasRestoreACL() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional bool restoreACL = 4 [default = false];</code>
+       */
+      public boolean getRestoreACL() {
+        return restoreACL_;
+      }
+      /**
+       * <code>optional bool restoreACL = 4 [default = false];</code>
+       */
+      public Builder setRestoreACL(boolean value) {
+        bitField0_ |= 0x00000008;
+        restoreACL_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bool restoreACL = 4 [default = false];</code>
+       */
+      public Builder clearRestoreACL() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        restoreACL_ = false;
+        onChanged();
+        return this;
+      }
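
For reference, the parser's case 32 and the writer's output.writeBool(4, restoreACL_) in the hunks above are two views of the same wire format: a protobuf tag is (field_number << 3) | wire_type, and bool uses varint wire type 0, so

    tag = (4 << 3) | 0 = 32

This is also why an older client that never sets field 4 interoperates cleanly: the reader simply never sees tag 32, hasRestoreACL() stays false, and getRestoreACL() falls back to the declared default of false.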
       public final Builder setUnknownFields(
           final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
         return super.setUnknownFields(unknownFields);
@@ -46103,11 +46191,11 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
    * <pre>
@@ -46157,11 +46245,11 @@ public final class MasterProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -46195,7 +46283,7 @@ public final class MasterProtos {
 
     private int bitField0_;
     public static final int SNAPSHOT_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
@@ -46205,14 +46293,14 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -46525,9 +46613,9 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
@@ -46537,9 +46625,9 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -46547,7 +46635,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -46564,7 +46652,7 @@ public final class MasterProtos {
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -46577,13 +46665,13 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -46610,7 +46698,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -46618,23 +46706,23 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -46711,11 +46799,11 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
    * Protobuf type {@code hbase.pb.IsSnapshotDoneResponse}
@@ -46766,11 +46854,11 @@ public final class MasterProtos {
               break;
             }
             case 18: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000002) == 0x00000002)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -46819,7 +46907,7 @@ public final class MasterProtos {
     }
 
     public static final int SNAPSHOT_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
@@ -46829,14 +46917,14 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -47202,9 +47290,9 @@ public final class MasterProtos {
         return this;
       }
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
@@ -47214,9 +47302,9 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -47224,7 +47312,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -47241,7 +47329,7 @@ public final class MasterProtos {
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -47254,13 +47342,13 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000002) == 0x00000002) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -47287,7 +47375,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -47295,23 +47383,23 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 2;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -47379,11 +47467,11 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot();
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
   }
   /**
    * Protobuf type {@code hbase.pb.IsRestoreSnapshotDoneRequest}
@@ -47428,11 +47516,11 @@ public final class MasterProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = snapshot_.toBuilder();
               }
-              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
+              snapshot_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.PARSER, extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(snapshot_);
                 snapshot_ = subBuilder.buildPartial();
@@ -47466,7 +47554,7 @@ public final class MasterProtos {
 
     private int bitField0_;
     public static final int SNAPSHOT_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_;
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
@@ -47476,14 +47564,14 @@ public final class MasterProtos {
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
     /**
      * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
      */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
-      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -47791,9 +47879,9 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = null;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription snapshot_ = null;
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
@@ -47803,9 +47891,9 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getSnapshot() {
         if (snapshotBuilder_ == null) {
-          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+          return snapshot_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         } else {
           return snapshotBuilder_.getMessage();
         }
@@ -47813,7 +47901,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder setSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -47830,7 +47918,7 @@ public final class MasterProtos {
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       public Builder setSnapshot(
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder builderForValue) {
         if (snapshotBuilder_ == null) {
           snapshot_ = builderForValue.build();
           onChanged();
@@ -47843,13 +47931,13 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription value) {
+      public Builder mergeSnapshot(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription value) {
         if (snapshotBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
               snapshot_ != null &&
-              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
+              snapshot_ != org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) {
             snapshot_ =
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
           } else {
             snapshot_ = value;
           }
@@ -47876,7 +47964,7 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder getSnapshotBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getSnapshotFieldBuilder().getBuilder();
@@ -47884,23 +47972,23 @@ public final class MasterProtos {
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
         if (snapshotBuilder_ != null) {
           return snapshotBuilder_.getMessageOrBuilder();
         } else {
           return snapshot_ == null ?
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance() : snapshot_;
         }
       }
       /**
        * <code>optional .hbase.pb.SnapshotDescription snapshot = 1;</code>
        */
       private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> 
+          org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>
           getSnapshotFieldBuilder() {
         if (snapshotBuilder_ == null) {
           snapshotBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder>(
                   getSnapshot(),
                   getParentForChildren(),
                   isClean());
@@ -77670,385 +77758,386 @@ public final class MasterProtos {
       "lient.proto\032\023ClusterStatus.proto\032\023ErrorH" +
       "andling.proto\032\021LockService.proto\032\017Proced" +
       "ure.proto\032\013Quota.proto\032\021Replication.prot" +
-      "o\"\234\001\n\020AddColumnRequest\022\'\n\ntable_name\030\001 \002" +
-      "(\0132\023.hbase.pb.TableName\0225\n\017column_famili" +
-      "es\030\002 \002(\0132\034.hbase.pb.ColumnFamilySchema\022\026" +
-      "\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010" +
-      "\"$\n\021AddColumnResponse\022\017\n\007proc_id\030\001 \001(\004\"}" +
-      "\n\023DeleteColumnRequest\022\'\n\ntable_name\030\001 \002(",
-      "\0132\023.hbase.pb.TableName\022\023\n\013column_name\030\002 " +
-      "\002(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001" +
-      "(\004:\0010\"\'\n\024DeleteColumnResponse\022\017\n\007proc_id" +
-      "\030\001 \001(\004\"\237\001\n\023ModifyColumnRequest\022\'\n\ntable_" +
-      "name\030\001 \002(\0132\023.hbase.pb.TableName\0225\n\017colum" +
-      "n_families\030\002 \002(\0132\034.hbase.pb.ColumnFamily" +
-      "Schema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030" +
-      "\004 \001(\004:\0010\"\'\n\024ModifyColumnResponse\022\017\n\007proc" +
-      "_id\030\001 \001(\004\"n\n\021MoveRegionRequest\022)\n\006region" +
-      "\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022.\n\020des",
-      "t_server_name\030\002 \001(\0132\024.hbase.pb.ServerNam" +
-      "e\"\024\n\022MoveRegionResponse\"\210\001\n\030MergeTableRe" +
-      "gionsRequest\022)\n\006region\030\001 \003(\0132\031.hbase.pb." +
-      "RegionSpecifier\022\027\n\010forcible\030\003 \001(\010:\005false" +
-      "\022\026\n\013nonce_group\030\004 \001(\004:\0010\022\020\n\005nonce\030\005 \001(\004:" +
-      "\0010\",\n\031MergeTableRegionsResponse\022\017\n\007proc_" +
-      "id\030\001 \001(\004\"@\n\023AssignRegionRequest\022)\n\006regio" +
-      "n\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"\026\n\024As" +
-      "signRegionResponse\"X\n\025UnassignRegionRequ" +
-      "est\022)\n\00

<TRUNCATED>

[6/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
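
This part of the series regenerates AdminProtos and HBaseProtos. The large deletion below removes SnapshotDescription from HBaseProtos now that it lives in SnapshotProtos, so every reference migrates as follows:

    // removed below (old location):
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription
    // new location, as used throughout the MasterProtos hunks above:
    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription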
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
index de068e2..a567f97 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
@@ -19532,7 +19532,6 @@ public final class AdminProtos {
   }
   /**
    * <pre>
-   *
    * Roll request responses no longer include regions to flush
    * this list will always be empty when talking to a 1.0 server
    * </pre>
@@ -19798,7 +19797,6 @@ public final class AdminProtos {
     }
     /**
      * <pre>
-     *
      * Roll request responses no longer include regions to flush
      * this list will always be empty when talking to a 1.0 server
      * </pre>

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
index 5ed1187..b3b0831 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
@@ -12208,1316 +12208,6 @@ public final class HBaseProtos {
 
   }
 
-  public interface SnapshotDescriptionOrBuilder extends
-      // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotDescription)
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <code>required string name = 1;</code>
-     */
-    boolean hasName();
-    /**
-     * <code>required string name = 1;</code>
-     */
-    java.lang.String getName();
-    /**
-     * <code>required string name = 1;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getNameBytes();
-
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    boolean hasTable();
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    java.lang.String getTable();
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getTableBytes();
-
-    /**
-     * <code>optional int64 creation_time = 3 [default = 0];</code>
-     */
-    boolean hasCreationTime();
-    /**
-     * <code>optional int64 creation_time = 3 [default = 0];</code>
-     */
-    long getCreationTime();
-
-    /**
-     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-     */
-    boolean hasType();
-    /**
-     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-     */
-    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType();
-
-    /**
-     * <code>optional int32 version = 5;</code>
-     */
-    boolean hasVersion();
-    /**
-     * <code>optional int32 version = 5;</code>
-     */
-    int getVersion();
-
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    boolean hasOwner();
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    java.lang.String getOwner();
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getOwnerBytes();
-  }
-  /**
-   * <pre>
-   **
-   * Description of the snapshot to take
-   * </pre>
-   *
-   * Protobuf type {@code hbase.pb.SnapshotDescription}
-   */
-  public  static final class SnapshotDescription extends
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
-      // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotDescription)
-      SnapshotDescriptionOrBuilder {
-    // Use SnapshotDescription.newBuilder() to construct.
-    private SnapshotDescription(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
-      super(builder);
-    }
-    private SnapshotDescription() {
-      name_ = "";
-      table_ = "";
-      creationTime_ = 0L;
-      type_ = 1;
-      version_ = 0;
-      owner_ = "";
-    }
-
-    @java.lang.Override
-    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
-      return this.unknownFields;
-    }
-    private SnapshotDescription(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      this();
-      int mutable_bitField0_ = 0;
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              bitField0_ |= 0x00000001;
-              name_ = bs;
-              break;
-            }
-            case 18: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              bitField0_ |= 0x00000002;
-              table_ = bs;
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              creationTime_ = input.readInt64();
-              break;
-            }
-            case 32: {
-              int rawValue = input.readEnum();
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(4, rawValue);
-              } else {
-                bitField0_ |= 0x00000008;
-                type_ = rawValue;
-              }
-              break;
-            }
-            case 40: {
-              bitField0_ |= 0x00000010;
-              version_ = input.readInt32();
-              break;
-            }
-            case 50: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              bitField0_ |= 0x00000020;
-              owner_ = bs;
-              break;
-            }
-          }
-        }
-      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
-            e).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
-    }
-
-    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class);
-    }
-
-    /**
-     * Protobuf enum {@code hbase.pb.SnapshotDescription.Type}
-     */
-    public enum Type
-        implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
-      /**
-       * <code>DISABLED = 0;</code>
-       */
-      DISABLED(0),
-      /**
-       * <code>FLUSH = 1;</code>
-       */
-      FLUSH(1),
-      /**
-       * <code>SKIPFLUSH = 2;</code>
-       */
-      SKIPFLUSH(2),
-      ;
-
-      /**
-       * <code>DISABLED = 0;</code>
-       */
-      public static final int DISABLED_VALUE = 0;
-      /**
-       * <code>FLUSH = 1;</code>
-       */
-      public static final int FLUSH_VALUE = 1;
-      /**
-       * <code>SKIPFLUSH = 2;</code>
-       */
-      public static final int SKIPFLUSH_VALUE = 2;
-
-
-      public final int getNumber() {
-        return value;
-      }
-
-      /**
-       * @deprecated Use {@link #forNumber(int)} instead.
-       */
-      @java.lang.Deprecated
-      public static Type valueOf(int value) {
-        return forNumber(value);
-      }
-
-      public static Type forNumber(int value) {
-        switch (value) {
-          case 0: return DISABLED;
-          case 1: return FLUSH;
-          case 2: return SKIPFLUSH;
-          default: return null;
-        }
-      }
-
-      public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>
-          internalGetValueMap() {
-        return internalValueMap;
-      }
-      private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
-          Type> internalValueMap =
-            new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>() {
-              public Type findValueByNumber(int number) {
-                return Type.forNumber(number);
-              }
-            };
-
-      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
-          getValueDescriptor() {
-        return getDescriptor().getValues().get(ordinal());
-      }
-      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptorForType() {
-        return getDescriptor();
-      }
-      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
-      }
-
-      private static final Type[] VALUES = values();
-
-      public static Type valueOf(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-        if (desc.getType() != getDescriptor()) {
-          throw new java.lang.IllegalArgumentException(
-            "EnumValueDescriptor is not for this type.");
-        }
-        return VALUES[desc.getIndex()];
-      }
-
-      private final int value;
-
-      private Type(int value) {
-        this.value = value;
-      }
-
-      // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type)
-    }
-
-    private int bitField0_;
-    public static final int NAME_FIELD_NUMBER = 1;
-    private volatile java.lang.Object name_;
-    /**
-     * <code>required string name = 1;</code>
-     */
-    public boolean hasName() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>required string name = 1;</code>
-     */
-    public java.lang.String getName() {
-      java.lang.Object ref = name_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
-            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          name_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string name = 1;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getNameBytes() {
-      java.lang.Object ref = name_;
-      if (ref instanceof java.lang.String) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        name_ = b;
-        return b;
-      } else {
-        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    public static final int TABLE_FIELD_NUMBER = 2;
-    private volatile java.lang.Object table_;
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    public boolean hasTable() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    public java.lang.String getTable() {
-      java.lang.Object ref = table_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
-            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          table_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <pre>
-     * not needed for delete, but checked for in taking snapshot
-     * </pre>
-     *
-     * <code>optional string table = 2;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getTableBytes() {
-      java.lang.Object ref = table_;
-      if (ref instanceof java.lang.String) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        table_ = b;
-        return b;
-      } else {
-        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    public static final int CREATION_TIME_FIELD_NUMBER = 3;
-    private long creationTime_;
-    /**
-     * <code>optional int64 creation_time = 3 [default = 0];</code>
-     */
-    public boolean hasCreationTime() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    /**
-     * <code>optional int64 creation_time = 3 [default = 0];</code>
-     */
-    public long getCreationTime() {
-      return creationTime_;
-    }
-
-    public static final int TYPE_FIELD_NUMBER = 4;
-    private int type_;
-    /**
-     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-     */
-    public boolean hasType() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    /**
-     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-     */
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
-      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(type_);
-      return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH : result;
-    }
-
-    public static final int VERSION_FIELD_NUMBER = 5;
-    private int version_;
-    /**
-     * <code>optional int32 version = 5;</code>
-     */
-    public boolean hasVersion() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    /**
-     * <code>optional int32 version = 5;</code>
-     */
-    public int getVersion() {
-      return version_;
-    }
-
-    public static final int OWNER_FIELD_NUMBER = 6;
-    private volatile java.lang.Object owner_;
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    public boolean hasOwner() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    public java.lang.String getOwner() {
-      java.lang.Object ref = owner_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
-            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          owner_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>optional string owner = 6;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getOwnerBytes() {
-      java.lang.Object ref = owner_;
-      if (ref instanceof java.lang.String) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        owner_ = b;
-        return b;
-      } else {
-        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized == 1) return true;
-      if (isInitialized == 0) return false;
-
-      if (!hasName()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, table_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt64(3, creationTime_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeEnum(4, type_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeInt32(5, version_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_);
-      }
-      unknownFields.writeTo(output);
-    }
-
-    public int getSerializedSize() {
-      int size = memoizedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, table_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeInt64Size(3, creationTime_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeEnumSize(4, type_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeInt32Size(5, version_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_);
-      }
-      size += unknownFields.getSerializedSize();
-      memoizedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) obj;
-
-      boolean result = true;
-      result = result && (hasName() == other.hasName());
-      if (hasName()) {
-        result = result && getName()
-            .equals(other.getName());
-      }
-      result = result && (hasTable() == other.hasTable());
-      if (hasTable()) {
-        result = result && getTable()
-            .equals(other.getTable());
-      }
-      result = result && (hasCreationTime() == other.hasCreationTime());
-      if (hasCreationTime()) {
-        result = result && (getCreationTime()
-            == other.getCreationTime());
-      }
-      result = result && (hasType() == other.hasType());
-      if (hasType()) {
-        result = result && type_ == other.type_;
-      }
-      result = result && (hasVersion() == other.hasVersion());
-      if (hasVersion()) {
-        result = result && (getVersion()
-            == other.getVersion());
-      }
-      result = result && (hasOwner() == other.hasOwner());
-      if (hasOwner()) {
-        result = result && getOwner()
-            .equals(other.getOwner());
-      }
-      result = result && unknownFields.equals(other.unknownFields);
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptor().hashCode();
-      if (hasName()) {
-        hash = (37 * hash) + NAME_FIELD_NUMBER;
-        hash = (53 * hash) + getName().hashCode();
-      }
-      if (hasTable()) {
-        hash = (37 * hash) + TABLE_FIELD_NUMBER;
-        hash = (53 * hash) + getTable().hashCode();
-      }
-      if (hasCreationTime()) {
-        hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
-        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
-            getCreationTime());
-      }
-      if (hasType()) {
-        hash = (37 * hash) + TYPE_FIELD_NUMBER;
-        hash = (53 * hash) + type_;
-      }
-      if (hasVersion()) {
-        hash = (37 * hash) + VERSION_FIELD_NUMBER;
-        hash = (53 * hash) + getVersion();
-      }
-      if (hasOwner()) {
-        hash = (37 * hash) + OWNER_FIELD_NUMBER;
-        hash = (53 * hash) + getOwner().hashCode();
-      }
-      hash = (29 * hash) + unknownFields.hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        byte[] data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder() {
-      return DEFAULT_INSTANCE.toBuilder();
-    }
-    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription prototype) {
-      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() {
-      return this == DEFAULT_INSTANCE
-          ? new Builder() : new Builder().mergeFrom(this);
-    }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * <pre>
-     **
-     * Description of the snapshot to take
-     * </pre>
-     *
-     * Protobuf type {@code hbase.pb.SnapshotDescription}
-     */
-    public static final class Builder extends
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
-        // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotDescription)
-        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder {
-      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
-      }
-
-      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-                .alwaysUseFieldBuilders) {
-        }
-      }
-      public Builder clear() {
-        super.clear();
-        name_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
-        table_ = "";
-        bitField0_ = (bitField0_ & ~0x00000002);
-        creationTime_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        type_ = 1;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        version_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000010);
-        owner_ = "";
-        bitField0_ = (bitField0_ & ~0x00000020);
-        return this;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription build() {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.name_ = name_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.table_ = table_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.creationTime_ = creationTime_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.type_ = type_;
-        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        result.version_ = version_;
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        result.owner_ = owner_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder clone() {
-        return (Builder) super.clone();
-      }
-      public Builder setField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.setField(field, value);
-      }
-      public Builder clearField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
-        return (Builder) super.clearField(field);
-      }
-      public Builder clearOneof(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
-        return (Builder) super.clearOneof(oneof);
-      }
-      public Builder setRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          int index, Object value) {
-        return (Builder) super.setRepeatedField(field, index, value);
-      }
-      public Builder addRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.addRepeatedField(field, value);
-      }
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) {
-          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription other) {
-        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this;
-        if (other.hasName()) {
-          bitField0_ |= 0x00000001;
-          name_ = other.name_;
-          onChanged();
-        }
-        if (other.hasTable()) {
-          bitField0_ |= 0x00000002;
-          table_ = other.table_;
-          onChanged();
-        }
-        if (other.hasCreationTime()) {
-          setCreationTime(other.getCreationTime());
-        }
-        if (other.hasType()) {
-          setType(other.getType());
-        }
-        if (other.hasVersion()) {
-          setVersion(other.getVersion());
-        }
-        if (other.hasOwner()) {
-          bitField0_ |= 0x00000020;
-          owner_ = other.owner_;
-          onChanged();
-        }
-        this.mergeUnknownFields(other.unknownFields);
-        onChanged();
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasName()) {
-          return false;
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage();
-          throw e.unwrapIOException();
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      private java.lang.Object name_ = "";
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public boolean hasName() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public java.lang.String getName() {
-        java.lang.Object ref = name_;
-        if (!(ref instanceof java.lang.String)) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
-              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
-            name_ = s;
-          }
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getNameBytes() {
-        java.lang.Object ref = name_;
-        if (ref instanceof String) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          name_ = b;
-          return b;
-        } else {
-          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public Builder setName(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        name_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public Builder clearName() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        name_ = getDefaultInstance().getName();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string name = 1;</code>
-       */
-      public Builder setNameBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        name_ = value;
-        onChanged();
-        return this;
-      }
-
-      private java.lang.Object table_ = "";
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public boolean hasTable() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public java.lang.String getTable() {
-        java.lang.Object ref = table_;
-        if (!(ref instanceof java.lang.String)) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
-              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
-            table_ = s;
-          }
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getTableBytes() {
-        java.lang.Object ref = table_;
-        if (ref instanceof String) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          table_ = b;
-          return b;
-        } else {
-          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public Builder setTable(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000002;
-        table_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public Builder clearTable() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        table_ = getDefaultInstance().getTable();
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * not needed for delete, but checked for in taking snapshot
-       * </pre>
-       *
-       * <code>optional string table = 2;</code>
-       */
-      public Builder setTableBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000002;
-        table_ = value;
-        onChanged();
-        return this;
-      }
-
-      private long creationTime_ ;
-      /**
-       * <code>optional int64 creation_time = 3 [default = 0];</code>
-       */
-      public boolean hasCreationTime() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      /**
-       * <code>optional int64 creation_time = 3 [default = 0];</code>
-       */
-      public long getCreationTime() {
-        return creationTime_;
-      }
-      /**
-       * <code>optional int64 creation_time = 3 [default = 0];</code>
-       */
-      public Builder setCreationTime(long value) {
-        bitField0_ |= 0x00000004;
-        creationTime_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int64 creation_time = 3 [default = 0];</code>
-       */
-      public Builder clearCreationTime() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        creationTime_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      private int type_ = 1;
-      /**
-       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-       */
-      public boolean hasType() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      /**
-       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-       */
-      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
-        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(type_);
-        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH : result;
-      }
-      /**
-       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-       */
-      public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000008;
-        type_ = value.getNumber();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
-       */
-      public Builder clearType() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        type_ = 1;
-        onChanged();
-        return this;
-      }
-
-      private int version_ ;
-      /**
-       * <code>optional int32 version = 5;</code>
-       */
-      public boolean hasVersion() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
-      }
-      /**
-       * <code>optional int32 version = 5;</code>
-       */
-      public int getVersion() {
-        return version_;
-      }
-      /**
-       * <code>optional int32 version = 5;</code>
-       */
-      public Builder setVersion(int value) {
-        bitField0_ |= 0x00000010;
-        version_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 version = 5;</code>
-       */
-      public Builder clearVersion() {
-        bitField0_ = (bitField0_ & ~0x00000010);
-        version_ = 0;
-        onChanged();
-        return this;
-      }
-
-      private java.lang.Object owner_ = "";
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public boolean hasOwner() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public java.lang.String getOwner() {
-        java.lang.Object ref = owner_;
-        if (!(ref instanceof java.lang.String)) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
-              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
-            owner_ = s;
-          }
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getOwnerBytes() {
-        java.lang.Object ref = owner_;
-        if (ref instanceof String) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          owner_ = b;
-          return b;
-        } else {
-          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public Builder setOwner(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000020;
-        owner_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public Builder clearOwner() {
-        bitField0_ = (bitField0_ & ~0x00000020);
-        owner_ = getDefaultInstance().getOwner();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string owner = 6;</code>
-       */
-      public Builder setOwnerBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000020;
-        owner_ = value;
-        onChanged();
-        return this;
-      }
-      public final Builder setUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.setUnknownFields(unknownFields);
-      }
-
-      public final Builder mergeUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.mergeUnknownFields(unknownFields);
-      }
-
-
-      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription)
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription)
-    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription DEFAULT_INSTANCE;
-    static {
-      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription();
-    }
-
-    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstance() {
-      return DEFAULT_INSTANCE;
-    }
-
-    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription>
-        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SnapshotDescription>() {
-      public SnapshotDescription parsePartialFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-          return new SnapshotDescription(input, extensionRegistry);
-      }
-    };
-
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription> parser() {
-      return PARSER;
-    }
-
-    @java.lang.Override
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription> getParserForType() {
-      return PARSER;
-    }
-
-    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() {
-      return DEFAULT_INSTANCE;
-    }
-
-  }
-
   public interface ProcedureDescriptionOrBuilder extends
       // @@protoc_insertion_point(interface_extends:hbase.pb.ProcedureDescription)
       org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
@@ -20383,11 +19073,6 @@ public final class HBaseProtos {
     org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
       internal_static_hbase_pb_NameInt64Pair_fieldAccessorTable;
   private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_SnapshotDescription_descriptor;
-  private static final 
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-      internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable;
-  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
     internal_static_hbase_pb_ProcedureDescription_descriptor;
   private static final 
     org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
@@ -20474,36 +19159,30 @@ public final class HBaseProtos {
       "eBytesPair\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"" +
       "/\n\016BytesBytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006seco" +
       "nd\030\002 \002(\014\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022" +
-      "\r\n\005value\030\002 \001(\003\"\325\001\n\023SnapshotDescription\022\014" +
-      "\n\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\030\n\rcreation_" +
-      "time\030\003 \001(\003:\0010\0227\n\004type\030\004 \001(\0162\".hbase.pb.S" +
-      "napshotDescription.Type:\005FLUSH\022\017\n\007versio" +
-      "n\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\".\n\004Type\022\014\n\010DISABL" +
-      "ED\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\206\001\n\024Proce" +
-      "dureDescription\022\021\n\tsignature\030\001 \002(\t\022\020\n\010in",
-      "stance\030\002 \001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\022/" +
-      "\n\rconfiguration\030\004 \003(\0132\030.hbase.pb.NameStr" +
-      "ingPair\"\n\n\010EmptyMsg\"\033\n\007LongMsg\022\020\n\010long_m" +
-      "sg\030\001 \002(\003\"\037\n\tDoubleMsg\022\022\n\ndouble_msg\030\001 \002(" +
-      "\001\"\'\n\rBigDecimalMsg\022\026\n\016bigdecimal_msg\030\001 \002" +
-      "(\014\"5\n\004UUID\022\026\n\016least_sig_bits\030\001 \002(\004\022\025\n\rmo" +
-      "st_sig_bits\030\002 \002(\004\"T\n\023NamespaceDescriptor" +
-      "\022\014\n\004name\030\001 \002(\014\022/\n\rconfiguration\030\002 \003(\0132\030." +
-      "hbase.pb.NameStringPair\"\235\001\n\013VersionInfo\022" +
-      "\017\n\007version\030\001 \002(\t\022\013\n\003url\030\002 \002(\t\022\020\n\010revisio",
-      "n\030\003 \002(\t\022\014\n\004user\030\004 \002(\t\022\014\n\004date\030\005 \002(\t\022\024\n\014s" +
-      "rc_checksum\030\006 \002(\t\022\025\n\rversion_major\030\007 \001(\r" +
-      "\022\025\n\rversion_minor\030\010 \001(\r\"Q\n\020RegionServerI" +
-      "nfo\022\020\n\010infoPort\030\001 \001(\005\022+\n\014version_info\030\002 " +
-      "\001(\0132\025.hbase.pb.VersionInfo*r\n\013CompareTyp" +
-      "e\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020" +
-      "\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013" +
-      "\n\007GREATER\020\005\022\t\n\005NO_OP\020\006*n\n\010TimeUnit\022\017\n\013NA" +
-      "NOSECONDS\020\001\022\020\n\014MICROSECONDS\020\002\022\020\n\014MILLISE" +
-      "CONDS\020\003\022\013\n\007SECONDS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOU",
-      "RS\020\006\022\010\n\004DAYS\020\007BE\n1org.apache.hadoop.hbas" +
-      "e.shaded.protobuf.generatedB\013HBaseProtos" +
-      "H\001\240\001\001"
+      "\r\n\005value\030\002 \001(\003\"\206\001\n\024ProcedureDescription\022" +
+      "\021\n\tsignature\030\001 \002(\t\022\020\n\010instance\030\002 \001(\t\022\030\n\r" +
+      "creation_time\030\003 \001(\003:\0010\022/\n\rconfiguration\030" +
+      "\004 \003(\0132\030.hbase.pb.NameStringPair\"\n\n\010Empty" +
+      "Msg\"\033\n\007LongMsg\022\020\n\010long_msg\030\001 \002(\003\"\037\n\tDoub" +
+      "leMsg\022\022\n\ndouble_msg\030\001 \002(\001\"\'\n\rBigDecimalM" +
+      "sg\022\026\n\016bigdecimal_msg\030\001 \002(\014\"5\n\004UUID\022\026\n\016le",
+      "ast_sig_bits\030\001 \002(\004\022\025\n\rmost_sig_bits\030\002 \002(" +
+      "\004\"T\n\023NamespaceDescriptor\022\014\n\004name\030\001 \002(\014\022/" +
+      "\n\rconfiguration\030\002 \003(\0132\030.hbase.pb.NameStr" +
+      "ingPair\"\235\001\n\013VersionInfo\022\017\n\007version\030\001 \002(\t" +
+      "\022\013\n\003url\030\002 \002(\t\022\020\n\010revision\030\003 \002(\t\022\014\n\004user\030" +
+      "\004 \002(\t\022\014\n\004date\030\005 \002(\t\022\024\n\014src_checksum\030\006 \002(" +
+      "\t\022\025\n\rversion_major\030\007 \001(\r\022\025\n\rversion_mino" +
+      "r\030\010 \001(\r\"Q\n\020RegionServerInfo\022\020\n\010infoPort\030" +
+      "\001 \001(\005\022+\n\014version_info\030\002 \001(\0132\025.hbase.pb.V" +
+      "ersionInfo*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rL",
+      "ESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003" +
+      "\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005N" +
+      "O_OP\020\006*n\n\010TimeUnit\022\017\n\013NANOSECONDS\020\001\022\020\n\014M" +
+      "ICROSECONDS\020\002\022\020\n\014MILLISECONDS\020\003\022\013\n\007SECON" +
+      "DS\020\004\022\013\n\007MINUTES\020\005\022\t\n\005HOURS\020\006\022\010\n\004DAYS\020\007BE" +
+      "\n1org.apache.hadoop.hbase.shaded.protobu" +
+      "f.generatedB\013HBaseProtosH\001\240\001\001"
     };
     org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -20607,62 +19286,56 @@ public final class HBaseProtos {
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_NameInt64Pair_descriptor,
         new java.lang.String[] { "Name", "Value", });
-    internal_static_hbase_pb_SnapshotDescription_descriptor =
-      getDescriptor().getMessageTypes().get(15);
-    internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-        internal_static_hbase_pb_SnapshotDescription_descriptor,
-        new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", });
     internal_static_hbase_pb_ProcedureDescription_descriptor =
-      getDescriptor().getMessageTypes().get(16);
+      getDescriptor().getMessageTypes().get(15);
     internal_static_hbase_pb_ProcedureDescription_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_ProcedureDescription_descriptor,
         new java.lang.String[] { "Signature", "Instance", "CreationTime", "Configuration", });
     internal_static_hbase_pb_EmptyMsg_descriptor =
-      getDescriptor().getMessageTypes().get(17);
+      getDescriptor().getMessageTypes().get(16);
     internal_static_hbase_pb_EmptyMsg_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_EmptyMsg_descriptor,
         new java.lang.String[] { });
     internal_static_hbase_pb_LongMsg_descriptor =
-      getDescriptor().getMessageTypes().get(18);
+      getDescriptor().getMessageTypes().get(17);
     internal_static_hbase_pb_LongMsg_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_LongMsg_descriptor,
         new java.lang.String[] { "LongMsg", });
     internal_static_hbase_pb_DoubleMsg_descriptor =
-      getDescriptor().getMessageTypes().get(19);
+      getDescriptor().getMessageTypes().get(18);
     internal_static_hbase_pb_DoubleMsg_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_DoubleMsg_descriptor,
         new java.lang.String[] { "DoubleMsg", });
     internal_static_hbase_pb_BigDecimalMsg_descriptor =
-      getDescriptor().getMessageTypes().get(20);
+      getDescriptor().getMessageTypes().get(19);
     internal_static_hbase_pb_BigDecimalMsg_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_BigDecimalMsg_descriptor,
         new java.lang.String[] { "BigdecimalMsg", });
     internal_static_hbase_pb_UUID_descriptor =
-      getDescriptor().getMessageTypes().get(21);
+      getDescriptor().getMessageTypes().get(20);
     internal_static_hbase_pb_UUID_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_UUID_descriptor,
         new java.lang.String[] { "LeastSigBits", "MostSigBits", });
     internal_static_hbase_pb_NamespaceDescriptor_descriptor =
-      getDescriptor().getMessageTypes().get(22);
+      getDescriptor().getMessageTypes().get(21);
     internal_static_hbase_pb_NamespaceDescriptor_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_NamespaceDescriptor_descriptor,
         new java.lang.String[] { "Name", "Configuration", });
     internal_static_hbase_pb_VersionInfo_descriptor =
-      getDescriptor().getMessageTypes().get(23);
+      getDescriptor().getMessageTypes().get(22);
     internal_static_hbase_pb_VersionInfo_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_VersionInfo_descriptor,
         new java.lang.String[] { "Version", "Url", "Revision", "User", "Date", "SrcChecksum", "VersionMajor", "VersionMinor", });
     internal_static_hbase_pb_RegionServerInfo_descriptor =
-      getDescriptor().getMessageTypes().get(24);
+      getDescriptor().getMessageTypes().get(23);
     internal_static_hbase_pb_RegionServerInfo_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_RegionServerInfo_descriptor,


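A note on the renumbering hunk above: deleting SnapshotDescription from HBaseProtos shifts every later message type down one slot in the file descriptor, so ProcedureDescription through RegionServerInfo move from getMessageTypes().get(16..24) to get(15..23). A minimal sanity check of that ordering, assuming only the generated HBaseProtos class on the classpath (this snippet is illustrative, not part of the commit):

    import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

    public class DescriptorIndexCheck {
      public static void main(String[] args) {
        // After SnapshotDescription's removal, slot 15 should hold ProcedureDescription.
        String name = HBaseProtos.getDescriptor().getMessageTypes().get(15).getName();
        if (!"ProcedureDescription".equals(name)) {
          throw new IllegalStateException("unexpected descriptor order: " + name);
        }
      }
    }
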
[3/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
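
This part carries the generated SnapshotProtos side of the move: SnapshotDescription now lives in SnapshotProtos and gains an optional users_and_permissions field (field 7) holding an AccessControlProtos.UsersAndPermissions message, which is how retained ACLs travel with a snapshot. A minimal sketch of building one through the generated API visible in the diff below; the UsersAndPermissions.newBuilder() call follows the standard protobuf builder pattern and is assumed rather than shown in this hunk:

    import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class SnapshotDescriptionExample {
      public static void main(String[] args) {
        SnapshotDescription desc = SnapshotDescription.newBuilder()
            .setName("snap_t1")                        // required string name = 1
            .setTable("t1")                            // optional; checked when taking a snapshot
            .setType(SnapshotDescription.Type.FLUSH)   // FLUSH is the declared default
            .setOwner("alice")
            // Attach permissions only when ACLs should be retained with the snapshot.
            .setUsersAndPermissions(UsersAndPermissions.newBuilder().build())
            .build();
        System.out.println(desc.hasUsersAndPermissions()); // true
      }
    }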
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
index a366f77..454e3bc 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
@@ -14,6 +14,1526 @@ public final class SnapshotProtos {
     registerAllExtensions(
         (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
   }
+  public interface SnapshotDescriptionOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotDescription)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>required string name = 1;</code>
+     */
+    boolean hasName();
+    /**
+     * <code>required string name = 1;</code>
+     */
+    java.lang.String getName();
+    /**
+     * <code>required string name = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getNameBytes();
+
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    boolean hasTable();
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    java.lang.String getTable();
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getTableBytes();
+
+    /**
+     * <code>optional int64 creation_time = 3 [default = 0];</code>
+     */
+    boolean hasCreationTime();
+    /**
+     * <code>optional int64 creation_time = 3 [default = 0];</code>
+     */
+    long getCreationTime();
+
+    /**
+     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+     */
+    boolean hasType();
+    /**
+     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType();
+
+    /**
+     * <code>optional int32 version = 5;</code>
+     */
+    boolean hasVersion();
+    /**
+     * <code>optional int32 version = 5;</code>
+     */
+    int getVersion();
+
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    boolean hasOwner();
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    java.lang.String getOwner();
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getOwnerBytes();
+
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    boolean hasUsersAndPermissions();
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions();
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder();
+  }
+  /**
+   * <pre>
+   **
+   * Description of the snapshot to take
+   * </pre>
+   *
+   * Protobuf type {@code hbase.pb.SnapshotDescription}
+   */
+  public  static final class SnapshotDescription extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.SnapshotDescription)
+      SnapshotDescriptionOrBuilder {
+    // Use SnapshotDescription.newBuilder() to construct.
+    private SnapshotDescription(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private SnapshotDescription() {
+      name_ = "";
+      table_ = "";
+      creationTime_ = 0L;
+      type_ = 1;
+      version_ = 0;
+      owner_ = "";
+    }
+
+    @java.lang.Override
+    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private SnapshotDescription(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      int mutable_bitField0_ = 0;
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000001;
+              name_ = bs;
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000002;
+              table_ = bs;
+              break;
+            }
+            case 24: {
+              bitField0_ |= 0x00000004;
+              creationTime_ = input.readInt64();
+              break;
+            }
+            case 32: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(4, rawValue);
+              } else {
+                bitField0_ |= 0x00000008;
+                type_ = rawValue;
+              }
+              break;
+            }
+            case 40: {
+              bitField0_ |= 0x00000010;
+              version_ = input.readInt32();
+              break;
+            }
+            case 50: {
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
+              bitField0_ |= 0x00000020;
+              owner_ = bs;
+              break;
+            }
+            case 58: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000040) == 0x00000040)) {
+                subBuilder = usersAndPermissions_.toBuilder();
+              }
+              usersAndPermissions_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(usersAndPermissions_);
+                usersAndPermissions_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000040;
+              break;
+            }
+          }
+        }
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
+    }
+
+    /**
+     * Protobuf enum {@code hbase.pb.SnapshotDescription.Type}
+     */
+    public enum Type
+        implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * <code>DISABLED = 0;</code>
+       */
+      DISABLED(0),
+      /**
+       * <code>FLUSH = 1;</code>
+       */
+      FLUSH(1),
+      /**
+       * <code>SKIPFLUSH = 2;</code>
+       */
+      SKIPFLUSH(2),
+      ;
+
+      /**
+       * <code>DISABLED = 0;</code>
+       */
+      public static final int DISABLED_VALUE = 0;
+      /**
+       * <code>FLUSH = 1;</code>
+       */
+      public static final int FLUSH_VALUE = 1;
+      /**
+       * <code>SKIPFLUSH = 2;</code>
+       */
+      public static final int SKIPFLUSH_VALUE = 2;
+
+
+      public final int getNumber() {
+        return value;
+      }
+
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static Type valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static Type forNumber(int value) {
+        switch (value) {
+          case 0: return DISABLED;
+          case 1: return FLUSH;
+          case 2: return SKIPFLUSH;
+          default: return null;
+        }
+      }
+
+      public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+          Type> internalValueMap =
+            new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Type>() {
+              public Type findValueByNumber(int number) {
+                return Type.forNumber(number);
+              }
+            };
+
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(ordinal());
+      }
+      public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
+      }
+
+      private static final Type[] VALUES = values();
+
+      public static Type valueOf(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int value;
+
+      private Type(int value) {
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type)
+    }
+
+    private int bitField0_;
+    public static final int NAME_FIELD_NUMBER = 1;
+    private volatile java.lang.Object name_;
+    /**
+     * <code>required string name = 1;</code>
+     */
+    public boolean hasName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string name = 1;</code>
+     */
+    public java.lang.String getName() {
+      java.lang.Object ref = name_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          name_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string name = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getNameBytes() {
+      java.lang.Object ref = name_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        name_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int TABLE_FIELD_NUMBER = 2;
+    private volatile java.lang.Object table_;
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    public boolean hasTable() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    public java.lang.String getTable() {
+      java.lang.Object ref = table_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          table_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * not needed for delete, but checked for in taking snapshot
+     * </pre>
+     *
+     * <code>optional string table = 2;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getTableBytes() {
+      java.lang.Object ref = table_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        table_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int CREATION_TIME_FIELD_NUMBER = 3;
+    private long creationTime_;
+    /**
+     * <code>optional int64 creation_time = 3 [default = 0];</code>
+     */
+    public boolean hasCreationTime() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional int64 creation_time = 3 [default = 0];</code>
+     */
+    public long getCreationTime() {
+      return creationTime_;
+    }
+
+    public static final int TYPE_FIELD_NUMBER = 4;
+    private int type_;
+    /**
+     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+     */
+    public boolean hasType() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
+      org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(type_);
+      return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH : result;
+    }
+
+    public static final int VERSION_FIELD_NUMBER = 5;
+    private int version_;
+    /**
+     * <code>optional int32 version = 5;</code>
+     */
+    public boolean hasVersion() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional int32 version = 5;</code>
+     */
+    public int getVersion() {
+      return version_;
+    }
+
+    public static final int OWNER_FIELD_NUMBER = 6;
+    private volatile java.lang.Object owner_;
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    public boolean hasOwner() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    public java.lang.String getOwner() {
+      java.lang.Object ref = owner_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          owner_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string owner = 6;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getOwnerBytes() {
+      java.lang.Object ref = owner_;
+      if (ref instanceof java.lang.String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        owner_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int USERS_AND_PERMISSIONS_FIELD_NUMBER = 7;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_;
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    public boolean hasUsersAndPermissions() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
+      return usersAndPermissions_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance() : usersAndPermissions_;
+    }
+    /**
+     * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
+      return usersAndPermissions_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance() : usersAndPermissions_;
+    }
+
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      if (!hasName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (hasUsersAndPermissions()) {
+        if (!getUsersAndPermissions().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, table_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeInt64(3, creationTime_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeEnum(4, type_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeInt32(5, version_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeMessage(7, getUsersAndPermissions());
+      }
+      unknownFields.writeTo(output);
+    }
+
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, table_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeInt64Size(3, creationTime_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeEnumSize(4, type_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeInt32Size(5, version_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(7, getUsersAndPermissions());
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription other = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) obj;
+
+      boolean result = true;
+      result = result && (hasName() == other.hasName());
+      if (hasName()) {
+        result = result && getName()
+            .equals(other.getName());
+      }
+      result = result && (hasTable() == other.hasTable());
+      if (hasTable()) {
+        result = result && getTable()
+            .equals(other.getTable());
+      }
+      result = result && (hasCreationTime() == other.hasCreationTime());
+      if (hasCreationTime()) {
+        result = result && (getCreationTime()
+            == other.getCreationTime());
+      }
+      result = result && (hasType() == other.hasType());
+      if (hasType()) {
+        result = result && type_ == other.type_;
+      }
+      result = result && (hasVersion() == other.hasVersion());
+      if (hasVersion()) {
+        result = result && (getVersion()
+            == other.getVersion());
+      }
+      result = result && (hasOwner() == other.hasOwner());
+      if (hasOwner()) {
+        result = result && getOwner()
+            .equals(other.getOwner());
+      }
+      result = result && (hasUsersAndPermissions() == other.hasUsersAndPermissions());
+      if (hasUsersAndPermissions()) {
+        result = result && getUsersAndPermissions()
+            .equals(other.getUsersAndPermissions());
+      }
+      result = result && unknownFields.equals(other.unknownFields);
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasName()) {
+        hash = (37 * hash) + NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getName().hashCode();
+      }
+      if (hasTable()) {
+        hash = (37 * hash) + TABLE_FIELD_NUMBER;
+        hash = (53 * hash) + getTable().hashCode();
+      }
+      if (hasCreationTime()) {
+        hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
+        hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+            getCreationTime());
+      }
+      if (hasType()) {
+        hash = (37 * hash) + TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + type_;
+      }
+      if (hasVersion()) {
+        hash = (37 * hash) + VERSION_FIELD_NUMBER;
+        hash = (53 * hash) + getVersion();
+      }
+      if (hasOwner()) {
+        hash = (37 * hash) + OWNER_FIELD_NUMBER;
+        hash = (53 * hash) + getOwner().hashCode();
+      }
+      if (hasUsersAndPermissions()) {
+        hash = (37 * hash) + USERS_AND_PERMISSIONS_FIELD_NUMBER;
+        hash = (53 * hash) + getUsersAndPermissions().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(byte[] data)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        byte[] data,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(
+        java.io.InputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * <pre>
+     **
+     * Description of the snapshot to take
+     * </pre>
+     *
+     * Protobuf type {@code hbase.pb.SnapshotDescription}
+     */
+    public static final class Builder extends
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.SnapshotDescription)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder {
+      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+      }
+
+      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+          getUsersAndPermissionsFieldBuilder();
+        }
+      }
+      public Builder clear() {
+        super.clear();
+        name_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        table_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        creationTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        type_ = 1;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        version_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        owner_ = "";
+        bitField0_ = (bitField0_ & ~0x00000020);
+        if (usersAndPermissionsBuilder_ == null) {
+          usersAndPermissions_ = null;
+        } else {
+          usersAndPermissionsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000040);
+        return this;
+      }
+
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription build() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription buildPartial() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription result = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.name_ = name_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.table_ = table_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.creationTime_ = creationTime_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.type_ = type_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.version_ = version_;
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000020;
+        }
+        result.owner_ = owner_;
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        if (usersAndPermissionsBuilder_ == null) {
+          result.usersAndPermissions_ = usersAndPermissions_;
+        } else {
+          result.usersAndPermissions_ = usersAndPermissionsBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription other) {
+        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) return this;
+        if (other.hasName()) {
+          bitField0_ |= 0x00000001;
+          name_ = other.name_;
+          onChanged();
+        }
+        if (other.hasTable()) {
+          bitField0_ |= 0x00000002;
+          table_ = other.table_;
+          onChanged();
+        }
+        if (other.hasCreationTime()) {
+          setCreationTime(other.getCreationTime());
+        }
+        if (other.hasType()) {
+          setType(other.getType());
+        }
+        if (other.hasVersion()) {
+          setVersion(other.getVersion());
+        }
+        if (other.hasOwner()) {
+          bitField0_ |= 0x00000020;
+          owner_ = other.owner_;
+          onChanged();
+        }
+        if (other.hasUsersAndPermissions()) {
+          mergeUsersAndPermissions(other.getUsersAndPermissions());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasName()) {
+          return false;
+        }
+        if (hasUsersAndPermissions()) {
+          if (!getUsersAndPermissions().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      private java.lang.Object name_ = "";
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public boolean hasName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public java.lang.String getName() {
+        java.lang.Object ref = name_;
+        if (!(ref instanceof java.lang.String)) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            name_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+          getNameBytes() {
+        java.lang.Object ref = name_;
+        if (ref instanceof String) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          name_ = b;
+          return b;
+        } else {
+          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public Builder setName(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+        name_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public Builder clearName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        name_ = getDefaultInstance().getName();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string name = 1;</code>
+       */
+      public Builder setNameBytes(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+        name_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object table_ = "";
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public boolean hasTable() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public java.lang.String getTable() {
+        java.lang.Object ref = table_;
+        if (!(ref instanceof java.lang.String)) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            table_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+          getTableBytes() {
+        java.lang.Object ref = table_;
+        if (ref instanceof String) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          table_ = b;
+          return b;
+        } else {
+          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public Builder setTable(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000002;
+        table_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public Builder clearTable() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        table_ = getDefaultInstance().getTable();
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * not needed for delete, but checked for in taking snapshot
+       * </pre>
+       *
+       * <code>optional string table = 2;</code>
+       */
+      public Builder setTableBytes(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000002;
+        table_ = value;
+        onChanged();
+        return this;
+      }
+
+      private long creationTime_ ;
+      /**
+       * <code>optional int64 creation_time = 3 [default = 0];</code>
+       */
+      public boolean hasCreationTime() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional int64 creation_time = 3 [default = 0];</code>
+       */
+      public long getCreationTime() {
+        return creationTime_;
+      }
+      /**
+       * <code>optional int64 creation_time = 3 [default = 0];</code>
+       */
+      public Builder setCreationTime(long value) {
+        bitField0_ |= 0x00000004;
+        creationTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 creation_time = 3 [default = 0];</code>
+       */
+      public Builder clearCreationTime() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        creationTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      private int type_ = 1;
+      /**
+       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+       */
+      public boolean hasType() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type result = org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(type_);
+        return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH : result;
+      }
+      /**
+       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+       */
+      public Builder setType(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000008;
+        type_ = value.getNumber();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+       */
+      public Builder clearType() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        type_ = 1;
+        onChanged();
+        return this;
+      }
+
+      private int version_ ;
+      /**
+       * <code>optional int32 version = 5;</code>
+       */
+      public boolean hasVersion() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional int32 version = 5;</code>
+       */
+      public int getVersion() {
+        return version_;
+      }
+      /**
+       * <code>optional int32 version = 5;</code>
+       */
+      public Builder setVersion(int value) {
+        bitField0_ |= 0x00000010;
+        version_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int32 version = 5;</code>
+       */
+      public Builder clearVersion() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        version_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object owner_ = "";
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public boolean hasOwner() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public java.lang.String getOwner() {
+        java.lang.Object ref = owner_;
+        if (!(ref instanceof java.lang.String)) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            owner_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+          getOwnerBytes() {
+        java.lang.Object ref = owner_;
+        if (ref instanceof String) {
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          owner_ = b;
+          return b;
+        } else {
+          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public Builder setOwner(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000020;
+        owner_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public Builder clearOwner() {
+        bitField0_ = (bitField0_ & ~0x00000020);
+        owner_ = getDefaultInstance().getOwner();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string owner = 6;</code>
+       */
+      public Builder setOwnerBytes(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000020;
+        owner_ = value;
+        onChanged();
+        return this;
+      }
+
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_ = null;
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder> usersAndPermissionsBuilder_;
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public boolean hasUsersAndPermissions() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
+        if (usersAndPermissionsBuilder_ == null) {
+          return usersAndPermissions_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance() : usersAndPermissions_;
+        } else {
+          return usersAndPermissionsBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public Builder setUsersAndPermissions(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
+        if (usersAndPermissionsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          usersAndPermissions_ = value;
+          onChanged();
+        } else {
+          usersAndPermissionsBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public Builder setUsersAndPermissions(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder builderForValue) {
+        if (usersAndPermissionsBuilder_ == null) {
+          usersAndPermissions_ = builderForValue.build();
+          onChanged();
+        } else {
+          usersAndPermissionsBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public Builder mergeUsersAndPermissions(org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
+        if (usersAndPermissionsBuilder_ == null) {
+          if (((bitField0_ & 0x00000040) == 0x00000040) &&
+              usersAndPermissions_ != null &&
+              usersAndPermissions_ != org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance()) {
+            usersAndPermissions_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.newBuilder(usersAndPermissions_).mergeFrom(value).buildPartial();
+          } else {
+            usersAndPermissions_ = value;
+          }
+          onChanged();
+        } else {
+          usersAndPermissionsBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000040;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public Builder clearUsersAndPermissions() {
+        if (usersAndPermissionsBuilder_ == null) {
+          usersAndPermissions_ = null;
+          onChanged();
+        } else {
+          usersAndPermissionsBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000040);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder getUsersAndPermissionsBuilder() {
+        bitField0_ |= 0x00000040;
+        onChanged();
+        return getUsersAndPermissionsFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
+        if (usersAndPermissionsBuilder_ != null) {
+          return usersAndPermissionsBuilder_.getMessageOrBuilder();
+        } else {
+          return usersAndPermissions_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance() : usersAndPermissions_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+       */
+      private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>
+          getUsersAndPermissionsFieldBuilder() {
+        if (usersAndPermissionsBuilder_ == null) {
+          usersAndPermissionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>(
+                  getUsersAndPermissions(),
+                  getParentForChildren(),
+                  isClean());
+          usersAndPermissions_ = null;
+        }
+        return usersAndPermissionsBuilder_;
+      }
+      public final Builder setUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription)
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription>
+        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SnapshotDescription>() {
+      public SnapshotDescription parsePartialFrom(
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+          return new SnapshotDescription(input, extensionRegistry);
+      }
+    };
+
+    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SnapshotDescription> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
   public interface SnapshotFileInfoOrBuilder extends
       // @@protoc_insertion_point(interface_extends:hbase.pb.SnapshotFileInfo)
       org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
@@ -4845,6 +6365,11 @@ public final class SnapshotProtos {
   }
 
   private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_SnapshotDescription_descriptor;
+  private static final
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable;
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
     internal_static_hbase_pb_SnapshotFileInfo_descriptor;
   private static final 
     org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
@@ -4878,25 +6403,33 @@ public final class SnapshotProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" +
-      "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" +
-      " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" +
-      "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" +
-      "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" +
-      "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" +
-      "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" +
-      "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" +
-      "napshotRegionManifest.FamilyFiles\032T\n\tSto" +
-      "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132",
-      "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" +
-      "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" +
-      "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" +
-      "onManifest.StoreFile\"\177\n\024SnapshotDataMani" +
-      "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" +
-      "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" +
-      "ase.pb.SnapshotRegionManifestBK\n1org.apa" +
-      "che.hadoop.hbase.shaded.protobuf.generat" +
-      "edB\016SnapshotProtosH\001\210\001\001\240\001\001"
+      "\n\016Snapshot.proto\022\010hbase.pb\032\023AccessContro" +
+      "l.proto\032\010FS.proto\032\013HBase.proto\"\223\002\n\023Snaps" +
+      "hotDescription\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 " +
+      "\001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\0227\n\004type\030\004 " +
+      "\001(\0162\".hbase.pb.SnapshotDescription.Type:" +
+      "\005FLUSH\022\017\n\007version\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\022<" +
+      "\n\025users_and_permissions\030\007 \001(\0132\035.hbase.pb" +
+      ".UsersAndPermissions\".\n\004Type\022\014\n\010DISABLED" +
+      "\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\222\001\n\020Snapsho" +
+      "tFileInfo\022-\n\004type\030\001 \002(\0162\037.hbase.pb.Snaps",
+      "hotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal_s" +
+      "erver\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022\t\n" +
+      "\005HFILE\020\001\022\007\n\003WAL\020\002\"\323\002\n\026SnapshotRegionMani" +
+      "fest\022\017\n\007version\030\001 \001(\005\022)\n\013region_info\030\002 \002" +
+      "(\0132\024.hbase.pb.RegionInfo\022B\n\014family_files" +
+      "\030\003 \003(\0132,.hbase.pb.SnapshotRegionManifest" +
+      ".FamilyFiles\032T\n\tStoreFile\022\014\n\004name\030\001 \002(\t\022" +
+      "&\n\treference\030\002 \001(\0132\023.hbase.pb.Reference\022" +
+      "\021\n\tfile_size\030\003 \001(\004\032c\n\013FamilyFiles\022\023\n\013fam" +
+      "ily_name\030\001 \002(\014\022?\n\013store_files\030\002 \003(\0132*.hb",
+      "ase.pb.SnapshotRegionManifest.StoreFile\"" +
+      "\177\n\024SnapshotDataManifest\022+\n\014table_schema\030" +
+      "\001 \002(\0132\025.hbase.pb.TableSchema\022:\n\020region_m" +
+      "anifests\030\002 \003(\0132 .hbase.pb.SnapshotRegion" +
+      "ManifestBK\n1org.apache.hadoop.hbase.shad" +
+      "ed.protobuf.generatedB\016SnapshotProtosH\001\210" +
+      "\001\001\240\001\001"
     };
     org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -4909,17 +6442,24 @@ public final class SnapshotProtos {
     org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor(),
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor(),
         }, assigner);
-    internal_static_hbase_pb_SnapshotFileInfo_descriptor =
+    internal_static_hbase_pb_SnapshotDescription_descriptor =
       getDescriptor().getMessageTypes().get(0);
+    internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_hbase_pb_SnapshotDescription_descriptor,
+        new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", "UsersAndPermissions", });
+    internal_static_hbase_pb_SnapshotFileInfo_descriptor =
+      getDescriptor().getMessageTypes().get(1);
     internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SnapshotFileInfo_descriptor,
         new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", });
     internal_static_hbase_pb_SnapshotRegionManifest_descriptor =
-      getDescriptor().getMessageTypes().get(1);
+      getDescriptor().getMessageTypes().get(2);
     internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SnapshotRegionManifest_descriptor,
@@ -4937,11 +6477,12 @@ public final class SnapshotProtos {
         internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor,
         new java.lang.String[] { "FamilyName", "StoreFiles", });
     internal_static_hbase_pb_SnapshotDataManifest_descriptor =
-      getDescriptor().getMessageTypes().get(2);
+      getDescriptor().getMessageTypes().get(3);
     internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new
       org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_hbase_pb_SnapshotDataManifest_descriptor,
         new java.lang.String[] { "TableSchema", "RegionManifests", });
+    org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.getDescriptor();
     org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
   }

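For illustration only, here is a minimal sketch (not part of the patch) of how the regenerated builder API above can be exercised. The snapshot name, table name, and the "alice" READ grant are invented for the example.

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class SnapshotDescriptionExample {
      public static void main(String[] args) {
        // Hypothetical grant: READ for user "alice" at global scope.
        AccessControlProtos.Permission perm = AccessControlProtos.Permission.newBuilder()
            .setType(AccessControlProtos.Permission.Type.Global)
            .setGlobalPermission(AccessControlProtos.GlobalPermission.newBuilder()
                .addAction(AccessControlProtos.Permission.Action.READ))
            .build();
        AccessControlProtos.UsersAndPermissions perms =
            AccessControlProtos.UsersAndPermissions.newBuilder()
                .addUserPermissions(AccessControlProtos.UsersAndPermissions.UserPermissions
                    .newBuilder()
                    .setUser(ByteString.copyFromUtf8("alice"))
                    .addPermissions(perm))
                .build();
        SnapshotDescription desc = SnapshotDescription.newBuilder()
            .setName("snap1")              // required field 1
            .setTable("t1")                // optional field 2
            .setOwner("alice")             // field 6, unchanged by this patch
            .setUsersAndPermissions(perms) // new field 7
            .build();
        System.out.println(desc.hasUsersAndPermissions()); // prints: true
      }
    }
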
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/protobuf/AccessControl.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/AccessControl.proto b/hbase-protocol-shaded/src/main/protobuf/AccessControl.proto
new file mode 100644
index 0000000..39143e2
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/protobuf/AccessControl.proto
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
+option java_outer_classname = "AccessControlProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "HBase.proto";
+
+/**
+* Messages and services in the shaded AccessControl.proto are used only for serializing/deserializing
+* permissions stored in .snapshotinfo; they should not be used for access control logic (for
+* coprocessor endpoint compatibility, use AccessControl.proto under the hbase-protocol module instead).
+*/
+
+message Permission {
+    enum Action {
+        READ = 0;
+        WRITE = 1;
+        EXEC = 2;
+        CREATE = 3;
+        ADMIN = 4;
+    }
+    enum Type {
+        Global = 1;
+        Namespace = 2;
+        Table = 3;
+    }
+    required Type type = 1;
+    optional GlobalPermission global_permission = 2;
+    optional NamespacePermission namespace_permission = 3;
+    optional TablePermission table_permission = 4;
+}
+
+message TablePermission {
+    optional TableName table_name = 1;
+    optional bytes family = 2;
+    optional bytes qualifier = 3;
+    repeated Permission.Action action = 4;
+}
+
+message NamespacePermission {
+    optional bytes namespace_name = 1;
+    repeated Permission.Action action = 2;
+}
+
+message GlobalPermission {
+    repeated Permission.Action action = 1;
+}
+
+message UserPermission {
+    required bytes user = 1;
+    required Permission permission = 3;
+}
+
+/**
+ * Content of the /hbase/acl/<table or namespace> znode.
+ */
+message UsersAndPermissions {
+  message UserPermissions {
+    required bytes user = 1;
+    repeated Permission permissions = 2;
+  }
+
+  repeated UserPermissions user_permissions = 1;
+}
+
+message GrantRequest {
+  required UserPermission user_permission = 1;
+  optional bool merge_existing_permissions = 2 [default = false];
+}
+
+message GrantResponse {
+}
+
+message RevokeRequest {
+  required UserPermission user_permission = 1;
+}
+
+message RevokeResponse {
+}
+
+message GetUserPermissionsRequest {
+  optional Permission.Type type = 1;
+  optional TableName table_name = 2;
+  optional bytes namespace_name = 3;
+}
+
+message GetUserPermissionsResponse {
+  repeated UserPermission user_permission = 1;
+}
+
+message CheckPermissionsRequest {
+  repeated Permission permission = 1;
+}
+
+message CheckPermissionsResponse {
+}
+
+service AccessControlService {
+    rpc Grant(GrantRequest)
+      returns (GrantResponse);
+
+    rpc Revoke(RevokeRequest)
+      returns (RevokeResponse);
+
+    rpc GetUserPermissions(GetUserPermissionsRequest)
+      returns (GetUserPermissionsResponse);
+
+    rpc CheckPermissions(CheckPermissionsRequest)
+      returns (CheckPermissionsResponse);
+}

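Since the comment above limits this shaded file to serializing and deserializing permissions, a round-trip through bytes is the expected usage. A minimal sketch (an illustration, not part of the patch):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions;

    public class UsersAndPermissionsRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        UsersAndPermissions in = UsersAndPermissions.getDefaultInstance();
        byte[] wire = in.toByteArray();                                 // serialize
        UsersAndPermissions out = UsersAndPermissions.parseFrom(wire);  // deserialize
        System.out.println(in.equals(out)); // true; java_generate_equals_and_hash is on
      }
    }
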
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/protobuf/HBase.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/HBase.proto b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
index 9b6b556..10742ad 100644
--- a/hbase-protocol-shaded/src/main/protobuf/HBase.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
@@ -169,22 +169,7 @@ message NameInt64Pair {
   optional int64 value = 2;
 }
 
-/**
- * Description of the snapshot to take
- */
-message SnapshotDescription {
-  required string name = 1;
-  optional string table = 2; // not needed for delete, but checked for in taking snapshot
-  optional int64 creation_time = 3 [default = 0];
-  enum Type {
-    DISABLED = 0;
-    FLUSH = 1;
-    SKIPFLUSH = 2;
-  }
-  optional Type type = 4 [default = FLUSH];
-  optional int32 version = 5;
-  optional string owner = 6;
-}
+
 
 /**
  * Description of the distributed procedure to take

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/Master.proto b/hbase-protocol-shaded/src/main/protobuf/Master.proto
index 0c3da02..b38ea57 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Master.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Master.proto
@@ -34,6 +34,7 @@ import "LockService.proto";
 import "Procedure.proto";
 import "Quota.proto";
 import "Replication.proto";
+import "Snapshot.proto";
 
 /* Column-level protobufs */
 
@@ -405,6 +406,7 @@ message RestoreSnapshotRequest {
   required SnapshotDescription snapshot = 1;
   optional uint64 nonce_group = 2 [default = 0];
   optional uint64 nonce = 3 [default = 0];
+  optional bool restoreACL = 4 [default = false];
 }
 
 message RestoreSnapshotResponse {

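A short sketch (not part of the patch) of how a caller would populate the new flag; it assumes Master.proto's generated outer class is MasterProtos, consistent with the other shaded protos, and uses an invented snapshot name.

    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class RestoreSnapshotRequestExample {
      public static void main(String[] args) {
        RestoreSnapshotRequest req = RestoreSnapshotRequest.newBuilder()
            .setSnapshot(SnapshotDescription.newBuilder().setName("snap1")) // required field
            .setRestoreACL(true) // new optional field 4; wire default is false
            .build();
        System.out.println(req.getRestoreACL()); // prints: true
      }
    }
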
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index ef3f973..2c70882 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -25,6 +25,7 @@ option optimize_for = SPEED;
 
 import "HBase.proto";
 import "RPC.proto";
+import "Snapshot.proto";
 
 // ============================================================================
 //  WARNING - Compatibility rules
@@ -235,6 +236,7 @@ enum CloneSnapshotState {
   CLONE_SNAPSHOT_ASSIGN_REGIONS = 4;
   CLONE_SNAPSHOT_UPDATE_DESC_CACHE = 5;
   CLONE_SNAPSHOT_POST_OPERATION = 6;
+  CLONE_SNAPSHOT_RESTORE_ACL = 7;
 }
 
 message CloneSnapshotStateData {
@@ -250,6 +252,7 @@ enum RestoreSnapshotState {
   RESTORE_SNAPSHOT_UPDATE_TABLE_DESCRIPTOR = 2;
   RESTORE_SNAPSHOT_WRITE_FS_LAYOUT = 3;
   RESTORE_SNAPSHOT_UPDATE_META = 4;
+  RESTORE_SNAPSHOT_RESTORE_ACL = 5;
 }
 
 message RestoreSnapshotStateData {

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-protocol-shaded/src/main/protobuf/Snapshot.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/Snapshot.proto b/hbase-protocol-shaded/src/main/protobuf/Snapshot.proto
index a73c7de..595a8cf 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Snapshot.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Snapshot.proto
@@ -23,9 +23,28 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 option optimize_for = SPEED;
 
+import "AccessControl.proto";
 import "FS.proto";
 import "HBase.proto";
 
+/**
+ * Description of the snapshot to take
+ */
+message SnapshotDescription {
+  required string name = 1;
+  optional string table = 2; // not needed for delete, but checked for in taking snapshot
+  optional int64 creation_time = 3 [default = 0];
+  enum Type {
+    DISABLED = 0;
+    FLUSH = 1;
+    SKIPFLUSH = 2;
+  }
+  optional Type type = 4 [default = FLUSH];
+  optional int32 version = 5;
+  optional string owner = 6;
+  optional UsersAndPermissions users_and_permissions = 7;
+}
+
 message SnapshotFileInfo {
   enum Type {
     HFILE = 1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index 14907ba..fd93b3b 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.MoveTablesR
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RSGroupAdminService;
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 @InterfaceAudience.Private
 public class RSGroupAdminEndpoint implements MasterObserver, CoprocessorService {
@@ -324,7 +324,7 @@ public class RSGroupAdminEndpoint implements MasterObserver, CoprocessorService
 
   @Override
   public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx,
-                                 NamespaceDescriptor ns) throws IOException {
+      NamespaceDescriptor ns) throws IOException {
     preCreateNamespace(ctx, ns);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
index f744ecb..0cfe099 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -83,7 +83,7 @@ public class RestoreTool {
   /**
    * return value represent path for:
    * ".../user/biadmin/backup1/default/t1_dn/backup_1396650096738/archive/data/default/t1_dn"
-   * @param tabelName table name
+   * @param tableName table name
    * @return path to table archive
    * @throws IOException exception
    */

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index ad8aa14..cf75c72 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index 69beef8..d87468a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 762dda0..02e7aa0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -153,7 +153,7 @@ import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -2309,7 +2309,7 @@ public class HMaster extends HRegionServer implements MasterServices {
   }
 
   public long restoreSnapshot(final SnapshotDescription snapshotDesc,
-      final long nonceGroup, final long nonce) throws IOException {
+      final long nonceGroup, final long nonce, final boolean restoreAcl) throws IOException {
     checkInitialized();
     getSnapshotManager().checkSnapshotSupport();
 
@@ -2321,7 +2321,8 @@ public class HMaster extends HRegionServer implements MasterServices {
         new MasterProcedureUtil.NonceProcedureRunnable(this, nonceGroup, nonce) {
       @Override
       protected void run() throws IOException {
-        setProcId(getSnapshotManager().restoreOrCloneSnapshot(snapshotDesc, getNonceKey()));
+        setProcId(
+            getSnapshotManager().restoreOrCloneSnapshot(snapshotDesc, getNonceKey(), restoreAcl));
       }
 
       @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 2f5e66e..6064f9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -54,8 +54,8 @@ import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 /**
  * Provides the coprocessor framework and environment for master oriented

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 40c4a71..baf962d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -80,7 +80,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringP
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
@@ -116,6 +115,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Remov
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -1221,8 +1221,8 @@ public class MasterRpcServices extends RSRpcServices
   public RestoreSnapshotResponse restoreSnapshot(RpcController controller,
       RestoreSnapshotRequest request) throws ServiceException {
     try {
-      long procId = master.restoreSnapshot(request.getSnapshot(),
-          request.getNonceGroup(), request.getNonce());
+      long procId = master.restoreSnapshot(request.getSnapshot(), request.getNonceGroup(),
+        request.getNonce(), request.getRestoreACL());
       return RestoreSnapshotResponse.newBuilder().setProcId(procId).build();
     } catch (ForeignException e) {
       throw new ServiceException(e.getCause());

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
index defd5cf..15e1855 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.master;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 
 /**
  * Watch the current snapshot under process


[2/8] hbase git commit: HBASE-11013: Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions

Posted by zg...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index 347d01d..683d840 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -48,9 +48,9 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotState;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
@@ -69,6 +69,7 @@ public class CloneSnapshotProcedure
 
   private HTableDescriptor hTableDescriptor;
   private SnapshotDescription snapshot;
+  private boolean restoreAcl;
   private List<HRegionInfo> newRegions = null;
   private Map<String, Pair<String, String> > parentsToChildrenPairMap = new HashMap<>();
 
@@ -83,6 +84,11 @@ public class CloneSnapshotProcedure
   public CloneSnapshotProcedure() {
   }
 
+  public CloneSnapshotProcedure(final MasterProcedureEnv env,
+      final HTableDescriptor hTableDescriptor, final SnapshotDescription snapshot) {
+    this(env, hTableDescriptor, snapshot, false);
+  }
+
   /**
    * Constructor
    * @param env MasterProcedureEnv
@@ -90,10 +96,12 @@ public class CloneSnapshotProcedure
    * @param snapshot snapshot to clone from
    */
   public CloneSnapshotProcedure(final MasterProcedureEnv env,
-      final HTableDescriptor hTableDescriptor, final SnapshotDescription snapshot) {
+      final HTableDescriptor hTableDescriptor, final SnapshotDescription snapshot,
+      final boolean restoreAcl) {
     super(env);
     this.hTableDescriptor = hTableDescriptor;
     this.snapshot = snapshot;
+    this.restoreAcl = restoreAcl;
 
     getMonitorStatus();
   }
@@ -109,6 +117,14 @@ public class CloneSnapshotProcedure
     return monitorStatus;
   }
 
+  private void restoreSnapshotAcl(MasterProcedureEnv env) throws IOException {
+    Configuration conf = env.getMasterServices().getConfiguration();
+    if (restoreAcl && snapshot.hasUsersAndPermissions() && snapshot.getUsersAndPermissions() != null
+        && SnapshotDescriptionUtils.isSecurityAvailable(conf)) {
+      RestoreSnapshotHelper.restoreSnapshotAcl(snapshot, hTableDescriptor.getTableName(), conf);
+    }
+  }
+
   @Override
   protected Flow executeFromState(final MasterProcedureEnv env, final CloneSnapshotState state)
       throws InterruptedException {
@@ -138,6 +154,10 @@ public class CloneSnapshotProcedure
           break;
         case CLONE_SNAPSHOT_UPDATE_DESC_CACHE:
           CreateTableProcedure.updateTableDescCache(env, getTableName());
+          setNextState(CloneSnapshotState.CLONE_SNAPSHOT_RESTORE_ACL);
+          break;
+        case CLONE_SNAPSHOT_RESTORE_ACL:
+          restoreSnapshotAcl(env);
           setNextState(CloneSnapshotState.CLONE_SNAPSHOT_POST_OPERATION);
           break;
         case CLONE_SNAPSHOT_POST_OPERATION:

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
index f8c9d8f..21709f8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
@@ -48,9 +48,9 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotState;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
@@ -69,6 +69,7 @@ public class RestoreSnapshotProcedure
   private Map<String, Pair<String, String>> parentsToChildrenPairMap = new HashMap<>();
 
   private SnapshotDescription snapshot;
+  private boolean restoreAcl;
 
   // Monitor
   private MonitoredTask monitorStatus = null;
@@ -81,6 +82,10 @@ public class RestoreSnapshotProcedure
   public RestoreSnapshotProcedure() {
   }
 
+  public RestoreSnapshotProcedure(final MasterProcedureEnv env,
+      final HTableDescriptor hTableDescriptor, final SnapshotDescription snapshot) {
+    this(env, hTableDescriptor, snapshot, false);
+  }
+
   /**
    * Constructor
    * @param env MasterProcedureEnv
@@ -91,12 +96,14 @@ public class RestoreSnapshotProcedure
   public RestoreSnapshotProcedure(
       final MasterProcedureEnv env,
       final HTableDescriptor hTableDescriptor,
-      final SnapshotDescription snapshot) {
+      final SnapshotDescription snapshot,
+      final boolean restoreAcl) {
     super(env);
     // This is the new schema we are going to write out as this modification.
     this.modifiedHTableDescriptor = hTableDescriptor;
     // Snapshot information
     this.snapshot = snapshot;
+    this.restoreAcl = restoreAcl;
 
     // Monitor
     getMonitorStatus();
@@ -140,6 +147,10 @@ public class RestoreSnapshotProcedure
           break;
         case RESTORE_SNAPSHOT_UPDATE_META:
           updateMETA(env);
+          setNextState(RestoreSnapshotState.RESTORE_SNAPSHOT_RESTORE_ACL);
+          break;
+        case RESTORE_SNAPSHOT_RESTORE_ACL:
+          restoreSnapshotAcl(env);
           return Flow.NO_MORE_STATE;
         default:
           throw new UnsupportedOperationException("unhandled state=" + state);
@@ -474,6 +485,16 @@ public class RestoreSnapshotProcedure
       monitorStatus.getCompletionTimestamp() - monitorStatus.getStartTime());
   }
 
+  private void restoreSnapshotAcl(final MasterProcedureEnv env) throws IOException {
+    if (restoreAcl && snapshot.hasUsersAndPermissions() && snapshot.getUsersAndPermissions() != null
+        && SnapshotDescriptionUtils
+            .isSecurityAvailable(env.getMasterServices().getConfiguration())) {
+      // restore acl of snapshot to table.
+      RestoreSnapshotHelper.restoreSnapshotAcl(snapshot, TableName.valueOf(snapshot.getTable()),
+        env.getMasterServices().getConfiguration());
+    }
+  }
+
   /**
    * Make sure that region states of the region list is in OFFLINE state.
    * @param env MasterProcedureEnv

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
index 5f86e08..c23ca6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.mob.MobUtils;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
 import org.apache.hadoop.hbase.util.FSUtils;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
index 73cd4d7..8d6568f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.procedure.Procedure;
 import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.util.Pair;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
index de46268..0448f92 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
index 55d58e0..b81c7db 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
@@ -67,10 +67,10 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription.Type;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription.Type;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
@@ -680,7 +680,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
    */
   private long cloneSnapshot(final SnapshotDescription reqSnapshot, final TableName tableName,
       final SnapshotDescription snapshot, final HTableDescriptor snapshotTableDesc,
-      final NonceKey nonceKey) throws IOException {
+      final NonceKey nonceKey, final boolean restoreAcl) throws IOException {
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
     HTableDescriptor htd = new HTableDescriptor(tableName, snapshotTableDesc);
     if (cpHost != null) {
@@ -688,7 +688,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     }
     long procId;
     try {
-      procId = cloneSnapshot(snapshot, htd, nonceKey);
+      procId = cloneSnapshot(snapshot, htd, nonceKey, restoreAcl);
     } catch (IOException e) {
       LOG.error("Exception occurred while cloning the snapshot " + snapshot.getName()
         + " as table " + tableName.getNameAsString(), e);
@@ -712,7 +712,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
    * @return procId the ID of the clone snapshot procedure
    */
   synchronized long cloneSnapshot(final SnapshotDescription snapshot,
-      final HTableDescriptor hTableDescriptor, final NonceKey nonceKey)
+      final HTableDescriptor hTableDescriptor, final NonceKey nonceKey, final boolean restoreAcl)
       throws HBaseSnapshotException {
     TableName tableName = hTableDescriptor.getTableName();
 
@@ -728,8 +728,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
 
     try {
       long procId = master.getMasterProcedureExecutor().submitProcedure(
-        new CloneSnapshotProcedure(
-          master.getMasterProcedureExecutor().getEnvironment(), hTableDescriptor, snapshot),
+        new CloneSnapshotProcedure(master.getMasterProcedureExecutor().getEnvironment(),
+            hTableDescriptor, snapshot, restoreAcl),
         nonceKey);
       this.restoreTableToProcIdMap.put(tableName, procId);
       return procId;
@@ -747,8 +747,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
    * @param nonceKey unique identifier to prevent duplicated RPC
    * @throws IOException
    */
-  public long restoreOrCloneSnapshot(final SnapshotDescription reqSnapshot, final NonceKey nonceKey)
-      throws IOException {
+  public long restoreOrCloneSnapshot(final SnapshotDescription reqSnapshot, final NonceKey nonceKey,
+      final boolean restoreAcl) throws IOException {
     FileSystem fs = master.getMasterFileSystem().getFileSystem();
     Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(reqSnapshot, rootDir);
 
@@ -777,28 +777,30 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
     // Execute the restore/clone operation
     long procId;
     if (MetaTableAccessor.tableExists(master.getConnection(), tableName)) {
-      procId = restoreSnapshot(reqSnapshot, tableName, snapshot, snapshotTableDesc, nonceKey);
+      procId = restoreSnapshot(reqSnapshot, tableName, snapshot, snapshotTableDesc, nonceKey,
+        restoreAcl);
     } else {
-      procId = cloneSnapshot(reqSnapshot, tableName, snapshot, snapshotTableDesc, nonceKey);
+      procId =
+          cloneSnapshot(reqSnapshot, tableName, snapshot, snapshotTableDesc, nonceKey, restoreAcl);
     }
     return procId;
   }
 
   /**
-   * Restore the specified snapshot.
-   * The restore will fail if the destination table has a snapshot or restore in progress.
-   *
+   * Restore the specified snapshot. The restore will fail if the destination table has a snapshot
+   * or restore in progress.
    * @param reqSnapshot Snapshot Descriptor from request
    * @param tableName table to restore
    * @param snapshot Snapshot Descriptor
    * @param snapshotTableDesc Table Descriptor
    * @param nonceKey unique identifier to prevent duplicated RPC
+   * @param restoreAcl true to restore acl of snapshot
    * @return procId the ID of the restore snapshot procedure
    * @throws IOException
    */
   private long restoreSnapshot(final SnapshotDescription reqSnapshot, final TableName tableName,
       final SnapshotDescription snapshot, final HTableDescriptor snapshotTableDesc,
-      final NonceKey nonceKey) throws IOException {
+      final NonceKey nonceKey, final boolean restoreAcl) throws IOException {
     MasterCoprocessorHost cpHost = master.getMasterCoprocessorHost();
 
     if (master.getTableStateManager().isTableState(
@@ -815,7 +817,7 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
 
     long procId;
     try {
-      procId = restoreSnapshot(snapshot, snapshotTableDesc, nonceKey);
+      procId = restoreSnapshot(snapshot, snapshotTableDesc, nonceKey, restoreAcl);
     } catch (IOException e) {
       LOG.error("Exception occurred while restoring the snapshot " + snapshot.getName()
         + " as table " + tableName.getNameAsString(), e);
@@ -831,16 +833,16 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
   }
 
   /**
-   * Restore the specified snapshot.
-   * The restore will fail if the destination table has a snapshot or restore in progress.
-   *
+   * Restore the specified snapshot. The restore will fail if the destination table has a snapshot
+   * or restore in progress.
    * @param snapshot Snapshot Descriptor
    * @param hTableDescriptor Table Descriptor
    * @param nonceKey unique identifier to prevent duplicated RPC
+   * @param restoreAcl true to restore acl of snapshot
    * @return procId the ID of the restore snapshot procedure
    */
   private synchronized long restoreSnapshot(final SnapshotDescription snapshot,
-      final HTableDescriptor hTableDescriptor, final NonceKey nonceKey)
+      final HTableDescriptor hTableDescriptor, final NonceKey nonceKey, final boolean restoreAcl)
       throws HBaseSnapshotException {
     final TableName tableName = hTableDescriptor.getTableName();
 
@@ -856,8 +858,8 @@ public class SnapshotManager extends MasterProcedureManager implements Stoppable
 
     try {
       long procId = master.getMasterProcedureExecutor().submitProcedure(
-        new RestoreSnapshotProcedure(
-          master.getMasterProcedureExecutor().getEnvironment(), hTableDescriptor, snapshot),
+        new RestoreSnapshotProcedure(master.getMasterProcedureExecutor().getEnvironment(),
+            hTableDescriptor, snapshot, restoreAcl),
         nonceKey);
       this.restoreTableToProcIdMap.put(tableName, procId);
       return procId;

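With the flag plumbed from the RPC layer down to the procedures above, client usage reduces to the new three-argument Admin overloads. A hedged end-to-end sketch (snapshot and table names are invented; the overloads are the ones this change introduces):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class RestoreWithAclExample {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Clone into a new table, re-applying the permissions retained in the snapshot.
          admin.cloneSnapshot("snap1", TableName.valueOf("t1_clone"), true);
          // In-place restore; the target table is assumed to be disabled first.
          // 'false' skips the failsafe snapshot, 'true' restores the retained ACLs.
          admin.restoreSnapshot("snap1", false, true);
        }
      }
    }
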
http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 123758f..fce4eaa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.master.locking.LockManager;
 import org.apache.hadoop.hbase.master.locking.LockProcedure;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 91fb44b..a4a7537 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -158,7 +158,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
index 248ccdc..9c42e4d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
@@ -28,10 +28,10 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;
 import org.apache.hadoop.hbase.procedure.Subprocedure;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.SnapshotSubprocedurePool;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
index 7b43c3d..a11844d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
@@ -51,10 +51,10 @@ import org.apache.hadoop.hbase.procedure.RegionServerProcedureManager;
 import org.apache.hadoop.hbase.procedure.Subprocedure;
 import org.apache.hadoop.hbase.procedure.SubprocedureFactory;
 import org.apache.hadoop.hbase.procedure.ZKProcedureMemberRpcs;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index c1aad93..b719eba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -461,7 +461,7 @@ public class AccessControlLists {
     return allPerms;
   }
 
-  static ListMultimap<String, TablePermission> getTablePermissions(Configuration conf,
+  public static ListMultimap<String, TablePermission> getTablePermissions(Configuration conf,
       TableName tableName) throws IOException {
     return getPermissions(conf, tableName != null ? tableName.getName() : null, null);
   }
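
The widened visibility above lets snapshot code outside this class read a table's ACL entries directly. A minimal caller sketch, assuming a secure cluster where the hbase:acl table exists (the table name is illustrative):

    import java.util.Map;

    import com.google.common.collect.ListMultimap;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.security.access.AccessControlLists;
    import org.apache.hadoop.hbase.security.access.TablePermission;

    public class DumpTableAclsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Each entry maps a user or group name to one granted permission.
        ListMultimap<String, TablePermission> perms =
            AccessControlLists.getTablePermissions(conf, TableName.valueOf("myTable"));
        for (Map.Entry<String, TablePermission> entry : perms.entries()) {
          System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
      }
    }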

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 8a6eb96..aa0c094 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -113,8 +113,8 @@ import org.apache.hadoop.hbase.security.access.Permission.Action;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index e3ad951..e80410f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mob.MobUtils;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 63839c4..683c404 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -29,10 +29,12 @@ import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.ThreadPoolExecutor;
 
+import com.google.common.collect.ListMultimap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -46,13 +48,17 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.HFileArchiver;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.security.access.AccessControlClient;
+import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
+import org.apache.hadoop.hbase.security.access.TablePermission;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -825,4 +831,25 @@ public class RestoreSnapshotHelper {
     }
     return metaChanges;
   }
+
+  public static void restoreSnapshotAcl(SnapshotDescription snapshot, TableName newTableName,
+      Configuration conf) throws IOException {
+    if (snapshot.hasUsersAndPermissions() && snapshot.getUsersAndPermissions() != null) {
+      LOG.info("Restore snapshot acl to table. snapshot: " + snapshot + ", table: " + newTableName);
+      ListMultimap<String, TablePermission> perms =
+          ShadedAccessControlUtil.toUserTablePermissions(snapshot.getUsersAndPermissions());
+      try (Connection conn = ConnectionFactory.createConnection(conf)) {
+        for (Entry<String, TablePermission> e : perms.entries()) {
+          String user = e.getKey();
+          TablePermission perm = e.getValue();
+          perm.setTableName(newTableName);
+          AccessControlClient.grant(conn, perm.getTableName(), user, perm.getFamily(),
+            perm.getQualifier(), perm.getActions());
+        }
+      } catch (Throwable e) {
+        throw new IOException("Grant acl into newly creatd table failed. snapshot: " + snapshot
+            + ", table: " + newTableName, e);
+      }
+    }
+  }
 }
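
A hedged call-site sketch for restoreSnapshotAcl above; the variables restoreAcl, snapshot, newTableName, and conf stand in for state the clone/restore procedures already hold, so this shows the shape of a caller rather than the procedures' actual wiring:

    // Illustrative only: once the new table exists and security is enabled,
    // grant back the permissions embedded in the snapshot's description.
    if (restoreAcl && snapshot.hasUsersAndPermissions()
        && SnapshotDescriptionUtils.isSecurityAvailable(conf)) {
      RestoreSnapshotHelper.restoreSnapshotAcl(snapshot, newTableName, conf);
    }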

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
index 0a482d8..c471337 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.hbase.snapshot;
 
-import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.Collections;
 
+import com.google.common.collect.ListMultimap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -30,11 +31,17 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
+import org.apache.hadoop.hbase.security.access.TablePermission;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.snapshot.SnapshotManifestV2;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 
@@ -247,10 +254,10 @@ public final class SnapshotDescriptionUtils {
    *           {@link SnapshotDescription}.
    */
   public static SnapshotDescription validate(SnapshotDescription snapshot, Configuration conf)
-      throws IllegalArgumentException {
+      throws IllegalArgumentException, IOException {
     if (!snapshot.hasTable()) {
       throw new IllegalArgumentException(
-        "Descriptor doesn't apply to a table, so we can't build it.");
+          "Descriptor doesn't apply to a table, so we can't build it.");
     }
 
     // set the creation time, if one hasn't been set
@@ -263,6 +270,11 @@ public final class SnapshotDescriptionUtils {
       builder.setCreationTime(time);
       snapshot = builder.build();
     }
+
+    // set the acl to snapshot if security feature is enabled.
+    if (isSecurityAvailable(conf)) {
+      snapshot = writeAclToSnapshotDescription(snapshot, conf);
+    }
     return snapshot;
   }
 
@@ -366,4 +378,26 @@ public final class SnapshotDescriptionUtils {
     if (!snapshot.hasOwner()) return false;
     return snapshot.getOwner().equals(user.getShortName());
   }
+
+  public static boolean isSecurityAvailable(Configuration conf) throws IOException {
+    try (Connection conn = ConnectionFactory.createConnection(conf)) {
+      try (Admin admin = conn.getAdmin()) {
+        return admin.tableExists(AccessControlLists.ACL_TABLE_NAME);
+      }
+    }
+  }
+
+  private static SnapshotDescription writeAclToSnapshotDescription(SnapshotDescription snapshot,
+      Configuration conf) throws IOException {
+    ListMultimap<String, TablePermission> perms =
+        User.runAsLoginUser(new PrivilegedExceptionAction<ListMultimap<String, TablePermission>>() {
+          @Override
+          public ListMultimap<String, TablePermission> run() throws Exception {
+            return AccessControlLists.getTablePermissions(conf,
+              TableName.valueOf(snapshot.getTable()));
+          }
+        });
+    return snapshot.toBuilder()
+        .setUsersAndPermissions(ShadedAccessControlUtil.toUserTablePermissions(perms)).build();
+  }
 }
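
Putting the two additions together: on a secure cluster, validate() now hands back a description with the source table's ACLs embedded, which restoreSnapshotAcl() later reads. A minimal capture-side sketch, assuming a reachable cluster and an existing table named "myTable":

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;

    public class SnapshotAclCaptureSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        SnapshotDescription desc = SnapshotDescription.newBuilder()
            .setName("mySnapshot")
            .setTable("myTable")
            .build();
        // When the hbase:acl table exists, the validated description comes
        // back with the table's users-and-permissions embedded.
        desc = SnapshotDescriptionUtils.validate(desc, conf);
        System.out.println("ACLs captured: " + desc.hasUsersAndPermissions());
      }
    }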

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index eb96438..d3f1cbc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.util.StringUtils;
 
@@ -144,7 +145,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
     private AtomicLong nonSharedHfilesArchiveSize = new AtomicLong();
     private AtomicLong logSize = new AtomicLong();
 
-    private final HBaseProtos.SnapshotDescription snapshot;
+    private final SnapshotProtos.SnapshotDescription snapshot;
     private final TableName snapshotTable;
     private final Configuration conf;
     private final FileSystem fs;
@@ -159,7 +160,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
     }
 
     SnapshotStats(final Configuration conf, final FileSystem fs,
-        final HBaseProtos.SnapshotDescription snapshot) {
+        final SnapshotProtos.SnapshotDescription snapshot) {
       this.snapshot = snapshot;
       this.snapshotTable = TableName.valueOf(snapshot.getTable());
       this.conf = conf;
@@ -234,7 +235,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
      *    with other snapshots and tables
      *
      *    This is only calculated when
-     *  {@link #getSnapshotStats(Configuration, HBaseProtos.SnapshotDescription, Map)}
+     *  {@link #getSnapshotStats(Configuration, SnapshotProtos.SnapshotDescription, Map)}
      *    is called with a non-null Map
      */
     public long getNonSharedArchivedStoreFilesSize() {
@@ -413,7 +414,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
       return false;
     }
 
-    HBaseProtos.SnapshotDescription snapshotDesc =
+    SnapshotProtos.SnapshotDescription snapshotDesc =
         SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
     snapshotManifest = SnapshotManifest.open(getConf(), fs, snapshotDir, snapshotDesc);
     return true;
@@ -423,7 +424,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
    * Dump the {@link SnapshotDescription}
    */
   private void printInfo() {
-    HBaseProtos.SnapshotDescription snapshotDesc = snapshotManifest.getSnapshotDescription();
+    SnapshotProtos.SnapshotDescription snapshotDesc = snapshotManifest.getSnapshotDescription();
     SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
     System.out.println("Snapshot Info");
     System.out.println("----------------------------------------");
@@ -457,7 +458,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
     }
 
     // Collect information about hfiles and logs in the snapshot
-    final HBaseProtos.SnapshotDescription snapshotDesc = snapshotManifest.getSnapshotDescription();
+    final SnapshotProtos.SnapshotDescription snapshotDesc = snapshotManifest.getSnapshotDescription();
     final String table = snapshotDesc.getTable();
     final SnapshotDescription desc = ProtobufUtil.createSnapshotDesc(snapshotDesc);
     final SnapshotStats stats = new SnapshotStats(this.getConf(), this.fs, desc);
@@ -552,7 +553,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
    */
   public static SnapshotStats getSnapshotStats(final Configuration conf,
       final SnapshotDescription snapshot) throws IOException {
-    HBaseProtos.SnapshotDescription snapshotDesc =
+    SnapshotProtos.SnapshotDescription snapshotDesc =
       ProtobufUtil.createHBaseProtosSnapshotDesc(snapshot);
     return getSnapshotStats(conf, snapshotDesc, null);
   }
@@ -565,7 +566,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
    * @return the snapshot stats
    */
   public static SnapshotStats getSnapshotStats(final Configuration conf,
-      final HBaseProtos.SnapshotDescription snapshotDesc,
+      final SnapshotProtos.SnapshotDescription snapshotDesc,
       final Map<Path, Integer> filesMap) throws IOException {
     Path rootDir = FSUtils.getRootDir(conf);
     FileSystem fs = FileSystem.get(rootDir.toUri(), conf);
@@ -598,7 +599,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
         new SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter(fs));
     List<SnapshotDescription> snapshotLists = new ArrayList<>(snapshots.length);
     for (FileStatus snapshotDirStat: snapshots) {
-      HBaseProtos.SnapshotDescription snapshotDesc =
+      SnapshotProtos.SnapshotDescription snapshotDesc =
           SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDirStat.getPath());
       snapshotLists.add(ProtobufUtil.createSnapshotDesc(snapshotDesc));
     }
@@ -621,7 +622,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
       final ConcurrentHashMap<Path, Integer> filesMap,
       final AtomicLong uniqueHFilesArchiveSize, final AtomicLong uniqueHFilesSize,
       final AtomicLong uniqueHFilesMobSize) throws IOException {
-    HBaseProtos.SnapshotDescription snapshotDesc =
+    SnapshotProtos.SnapshotDescription snapshotDesc =
         ProtobufUtil.createHBaseProtosSnapshotDesc(snapshot);
     Path rootDir = FSUtils.getRootDir(conf);
     final FileSystem fs = FileSystem.get(rootDir.toUri(), conf);

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 4e838ad..c5b24e9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
 import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index 46893f9..1fa9dc5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 567f42d..ccd3a7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -127,7 +127,7 @@ public final class SnapshotManifestV2 {
   }
 
   static List<SnapshotRegionManifest> loadRegionManifests(final Configuration conf,
-      final Executor executor,final FileSystem fs, final Path snapshotDir,
+      final Executor executor, final FileSystem fs, final Path snapshotDir,
       final SnapshotDescription desc, final int manifestSizeLimit) throws IOException {
     FileStatus[] manifestFiles = FSUtils.listStatus(fs, snapshotDir, new PathFilter() {
       @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
index 7a2bfe6..4b49be1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index 58f74f4..434784d 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -26,10 +26,11 @@
   import="org.apache.hadoop.fs.Path"
   import="org.apache.hadoop.hbase.HBaseConfiguration"
   import="org.apache.hadoop.hbase.master.HMaster"
-  import="org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription"
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.TableName"
-  import="org.apache.hadoop.util.StringUtils" %>
+  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription"
+%>
 <%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
   Configuration conf = master.getConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
new file mode 100644
index 0000000..5d6920a
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
@@ -0,0 +1,240 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.access.AccessControlConstants;
+import org.apache.hadoop.hbase.security.access.AccessController;
+import org.apache.hadoop.hbase.security.access.Permission;
+import org.apache.hadoop.hbase.security.access.SecureTestUtil;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+
+@Category({ MediumTests.class, ClientTests.class })
+public class TestSnapshotWithAcl extends SecureTestUtil {
+
+  public TableName TEST_TABLE = TableName.valueOf("TestSnapshotWithAcl");
+
+  private static final int ROW_COUNT = 30000;
+
+  private static byte[] TEST_FAMILY = Bytes.toBytes("f1");
+  private static byte[] TEST_QUALIFIER = Bytes.toBytes("cq");
+  private static byte[] TEST_ROW = Bytes.toBytes(0);
+  private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static Configuration conf;
+  private static HBaseAdmin admin = null;
+
+  // user who is the table owner and has all permissions on the table
+  private static User USER_OWNER;
+  // user with read/write permissions on the column family
+  private static User USER_RW;
+  // user with read-only permissions
+  private static User USER_RO;
+  // user with no permissions
+  private static User USER_NONE;
+
+  static class AccessReadAction implements AccessTestAction {
+
+    private TableName tableName;
+
+    public AccessReadAction(TableName tableName) {
+      this.tableName = tableName;
+    }
+
+    @Override
+    public Object run() throws Exception {
+      Get g = new Get(TEST_ROW);
+      g.addFamily(TEST_FAMILY);
+      try (Connection conn = ConnectionFactory.createConnection(conf)) {
+        try (Table t = conn.getTable(tableName)) {
+          t.get(g);
+        }
+      }
+      return null;
+    }
+  }
+
+  static class AccessWriteAction implements AccessTestAction {
+    private TableName tableName;
+
+    public AccessWriteAction(TableName tableName) {
+      this.tableName = tableName;
+    }
+
+    @Override
+    public Object run() throws Exception {
+      Put p = new Put(TEST_ROW);
+      p.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(0));
+      try (Connection conn = ConnectionFactory.createConnection(conf)) {
+        try (Table t = conn.getTable(tableName)) {
+          t.put(p);
+        }
+      }
+      return null;
+    }
+  }
+
+
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+    conf = TEST_UTIL.getConfiguration();
+    // Enable security
+    enableSecurity(conf);
+    conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName());
+    // Verify enableSecurity sets up what we require
+    verifyConfiguration(conf);
+    // Enable EXEC permission checking
+    conf.setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
+    TEST_UTIL.startMiniCluster();
+    MasterCoprocessorHost cpHost =
+        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost();
+    cpHost.load(AccessController.class, Coprocessor.PRIORITY_HIGHEST, conf);
+
+    USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
+    USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]);
+    USER_RO = User.createUserForTesting(conf, "rouser", new String[0]);
+    USER_NONE = User.createUserForTesting(conf, "usernone", new String[0]);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    admin = TEST_UTIL.getHBaseAdmin();
+    HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
+    HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
+    hcd.setMaxVersions(100);
+    htd.addFamily(hcd);
+    htd.setOwner(USER_OWNER);
+    admin.createTable(htd, new byte[][] { Bytes.toBytes("s") });
+    TEST_UTIL.waitTableEnabled(TEST_TABLE);
+
+    grantOnTable(TEST_UTIL, USER_RW.getShortName(), TEST_TABLE, TEST_FAMILY, null,
+            Permission.Action.READ, Permission.Action.WRITE);
+
+    grantOnTable(TEST_UTIL, USER_RO.getShortName(), TEST_TABLE, TEST_FAMILY, null,
+            Permission.Action.READ);
+  }
+
+  private void loadData() throws IOException {
+    try (Connection conn = ConnectionFactory.createConnection(conf)) {
+      try (Table t = conn.getTable(TEST_TABLE)) {
+        for (int i = 0; i < ROW_COUNT; i++) {
+          Put put = new Put(Bytes.toBytes(i));
+          put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
+          t.put(put);
+        }
+      }
+    }
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  private void verifyRows(TableName tableName) throws IOException {
+    try (Connection conn = ConnectionFactory.createConnection(conf)) {
+      try (Table t = conn.getTable(tableName)) {
+        try (ResultScanner scanner = t.getScanner(new Scan())) {
+          Result result;
+          int rowCount = 0;
+          while ((result = scanner.next()) != null) {
+            byte[] value = result.getValue(TEST_FAMILY, TEST_QUALIFIER);
+            Assert.assertArrayEquals(Bytes.toBytes(rowCount++), value);
+          }
+          Assert.assertEquals(ROW_COUNT, rowCount);
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testRestoreSnapshot() throws Exception {
+    verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RO, USER_RW);
+    verifyDenied(new AccessReadAction(TEST_TABLE), USER_NONE);
+    verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE);
+
+    loadData();
+    verifyRows(TEST_TABLE);
+
+    String snapshotName1 = "testSnapshot1";
+    admin.snapshot(snapshotName1, TEST_TABLE);
+
+    // clone snapshot with restoreAcl true.
+    TableName tableName1 = TableName.valueOf("tableName1");
+    admin.cloneSnapshot(snapshotName1, tableName1, true);
+    verifyRows(tableName1);
+    verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW);
+    verifyDenied(new AccessReadAction(tableName1), USER_NONE);
+    verifyAllowed(new AccessWriteAction(tableName1), USER_OWNER, USER_RW);
+    verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE);
+
+    // clone snapshot with restoreAcl false.
+    TableName tableName2 = TableName.valueOf("tableName2");
+    admin.cloneSnapshot(snapshotName1, tableName2, false);
+    verifyRows(tableName2);
+    verifyAllowed(new AccessReadAction(tableName2), USER_OWNER);
+    verifyDenied(new AccessReadAction(tableName2), USER_NONE, USER_RO, USER_RW);
+    verifyAllowed(new AccessWriteAction(tableName2), USER_OWNER);
+    verifyDenied(new AccessWriteAction(tableName2), USER_RO, USER_RW, USER_NONE);
+
+    // remove read permission for USER_RO.
+    revokeFromTable(TEST_UTIL, USER_RO.getShortName(), TEST_TABLE, TEST_FAMILY, null,
+      Permission.Action.READ);
+    verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessReadAction(TEST_TABLE), USER_RO, USER_NONE);
+    verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE);
+
+    // restore snapshot with restoreAcl false.
+    admin.disableTable(TEST_TABLE);
+    admin.restoreSnapshot(snapshotName1, false, false);
+    admin.enableTable(TEST_TABLE);
+    verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessReadAction(TEST_TABLE), USER_RO, USER_NONE);
+    verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE);
+
+    // restore snapshot with restoreAcl true.
+    admin.disableTable(TEST_TABLE);
+    admin.restoreSnapshot(snapshotName1, false, true);
+    admin.enableTable(TEST_TABLE);
+    verifyAllowed(new AccessReadAction(TEST_TABLE), USER_OWNER, USER_RO, USER_RW);
+    verifyDenied(new AccessReadAction(TEST_TABLE), USER_NONE);
+    verifyAllowed(new AccessWriteAction(TEST_TABLE), USER_OWNER, USER_RW);
+    verifyDenied(new AccessWriteAction(TEST_TABLE), USER_RO, USER_NONE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 3b80406..1e6d717 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -64,10 +64,10 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 9c6b132..8f88af7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.snapshot.DisabledTableSnapshotHandler;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
@@ -50,6 +49,7 @@ import org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
index d8221bb..54efe76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotState;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -50,7 +51,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
 
   protected final byte[] CF = Bytes.toBytes("cf1");
 
-  private static HBaseProtos.SnapshotDescription snapshot = null;
+  private static SnapshotProtos.SnapshotDescription snapshot = null;
 
   @After
   @Override
@@ -60,7 +61,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
     snapshot = null;
   }
 
-  private HBaseProtos.SnapshotDescription getSnapshot() throws Exception {
+  private SnapshotProtos.SnapshotDescription getSnapshot() throws Exception {
     if (snapshot == null) {
       final TableName snapshotTableName = TableName.valueOf("testCloneSnapshot");
       long tid = System.currentTimeMillis();
@@ -102,7 +103,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
     final HTableDescriptor htd = createHTableDescriptor(clonedTableName, CF);
 
     // take the snapshot
-    HBaseProtos.SnapshotDescription snapshotDesc = getSnapshot();
+    SnapshotProtos.SnapshotDescription snapshotDesc = getSnapshot();
 
     long procId = ProcedureTestingUtility.submitAndWait(
       procExec, new CloneSnapshotProcedure(procExec.getEnvironment(), htd, snapshotDesc));
@@ -115,7 +116,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
   @Test(timeout=60000)
   public void testCloneSnapshotToSameTable() throws Exception {
     // take the snapshot
-    HBaseProtos.SnapshotDescription snapshotDesc = getSnapshot();
+    SnapshotProtos.SnapshotDescription snapshotDesc = getSnapshot();
 
     final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
     final TableName clonedTableName = TableName.valueOf(snapshotDesc.getTable());
@@ -137,7 +138,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
     final HTableDescriptor htd = createHTableDescriptor(clonedTableName, CF);
 
     // take the snapshot
-    HBaseProtos.SnapshotDescription snapshotDesc = getSnapshot();
+    SnapshotProtos.SnapshotDescription snapshotDesc = getSnapshot();
 
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
 
@@ -161,7 +162,7 @@ public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase {
     final HTableDescriptor htd = createHTableDescriptor(clonedTableName, CF);
 
     // take the snapshot
-    HBaseProtos.SnapshotDescription snapshotDesc = getSnapshot();
+    SnapshotProtos.SnapshotDescription snapshotDesc = getSnapshot();
 
     ProcedureTestingUtility.waitNoProcedureRunning(procExec);
     ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
index 479b206..9141e0f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotState;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -67,7 +68,7 @@ public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase {
   protected final int rowCountCF4 = 40;
   protected final int rowCountCF1addition = 10;
 
-  private HBaseProtos.SnapshotDescription snapshot = null;
+  private SnapshotProtos.SnapshotDescription snapshot = null;
   private HTableDescriptor snapshotHTD = null;
 
   @Rule

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index 1dd1c3a..1d1a936 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -167,7 +167,7 @@ public class SecureTestUtil {
    * To indicate the action was not allowed, either throw an AccessDeniedException
    * or return an empty list of KeyValues.
    */
-  static interface AccessTestAction extends PrivilegedExceptionAction<Object> { }
+  protected static interface AccessTestAction extends PrivilegedExceptionAction<Object> { }
 
   /** This fails only in case of ADE or empty list for any of the actions. */
   public static void verifyAllowed(User user, AccessTestAction... actions) throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 8bf2c5c..bcf8670 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -117,8 +117,8 @@ import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
 import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -131,7 +131,6 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.mockito.Mockito;
 
 import com.google.protobuf.BlockingRpcChannel;
 import com.google.protobuf.RpcCallback;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 08c8107..040dfa6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -57,7 +57,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
 import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
 import org.apache.hadoop.hbase.regionserver.Region;
@@ -67,6 +66,7 @@ import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 3c10dee..9098277 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.mob.MobUtils;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
@@ -127,7 +128,7 @@ public final class SnapshotTestingUtils {
    * Make sure that there is only one snapshot returned from the master
    */
   public static void assertOneSnapshotThatMatches(Admin admin,
-      HBaseProtos.SnapshotDescription snapshot) throws IOException {
+      SnapshotProtos.SnapshotDescription snapshot) throws IOException {
     assertOneSnapshotThatMatches(admin, snapshot.getName(), TableName.valueOf(snapshot.getTable()));
   }
 
@@ -159,20 +160,19 @@ public final class SnapshotTestingUtils {
   }
 
   public static void confirmSnapshotValid(HBaseTestingUtility testUtil,
-      HBaseProtos.SnapshotDescription snapshotDescriptor, TableName tableName, byte[] family)
+      SnapshotProtos.SnapshotDescription snapshotDescriptor, TableName tableName, byte[] family)
       throws IOException {
     MasterFileSystem mfs = testUtil.getHBaseCluster().getMaster().getMasterFileSystem();
-    confirmSnapshotValid(snapshotDescriptor, tableName, family,
-        mfs.getRootDir(), testUtil.getAdmin(), mfs.getFileSystem());
+    confirmSnapshotValid(snapshotDescriptor, tableName, family, mfs.getRootDir(),
+      testUtil.getAdmin(), mfs.getFileSystem());
   }
 
   /**
    * Confirm that the snapshot contains references to all the files that should
    * be in the snapshot.
    */
-  public static void confirmSnapshotValid(
-      HBaseProtos.SnapshotDescription snapshotDescriptor, TableName tableName,
-      byte[] testFamily, Path rootDir, Admin admin, FileSystem fs)
+  public static void confirmSnapshotValid(SnapshotProtos.SnapshotDescription snapshotDescriptor,
+      TableName tableName, byte[] testFamily, Path rootDir, Admin admin, FileSystem fs)
       throws IOException {
     ArrayList nonEmptyTestFamilies = new ArrayList(1);
     nonEmptyTestFamilies.add(testFamily);
@@ -184,7 +184,7 @@ public final class SnapshotTestingUtils {
    * Confirm that the snapshot has no references files but only metadata.
    */
   public static void confirmEmptySnapshotValid(
-      HBaseProtos.SnapshotDescription snapshotDescriptor, TableName tableName,
+      SnapshotProtos.SnapshotDescription snapshotDescriptor, TableName tableName,
       byte[] testFamily, Path rootDir, Admin admin, FileSystem fs)
       throws IOException {
     ArrayList emptyTestFamilies = new ArrayList(1);
@@ -200,7 +200,7 @@ public final class SnapshotTestingUtils {
    * by the MasterSnapshotVerifier, at the end of the snapshot operation.
    */
   public static void confirmSnapshotValid(
-      HBaseProtos.SnapshotDescription snapshotDescriptor, TableName tableName,
+      SnapshotProtos.SnapshotDescription snapshotDescriptor, TableName tableName,
       List<byte[]> nonEmptyTestFamilies, List<byte[]> emptyTestFamilies,
       Path rootDir, Admin admin, FileSystem fs) throws IOException {
     final Configuration conf = admin.getConfiguration();
@@ -210,7 +210,7 @@ public final class SnapshotTestingUtils {
         snapshotDescriptor, rootDir);
     assertTrue(fs.exists(snapshotDir));
 
-    HBaseProtos.SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
+    SnapshotProtos.SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
 
     // Extract regions and families with store files
     final Set<byte[]> snapshotFamilies = new TreeSet<>(Bytes.BYTES_COMPARATOR);
@@ -272,7 +272,7 @@ public final class SnapshotTestingUtils {
    * @throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException
    */
   public static void waitForSnapshotToComplete(HMaster master,
-      HBaseProtos.SnapshotDescription snapshot, long sleep)
+      SnapshotProtos.SnapshotDescription snapshot, long sleep)
           throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
     final IsSnapshotDoneRequest request = IsSnapshotDoneRequest.newBuilder()
         .setSnapshot(snapshot).build();
@@ -426,7 +426,7 @@ public final class SnapshotTestingUtils {
 
     Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName,
                                                                         mfs.getRootDir());
-    HBaseProtos.SnapshotDescription snapshotDesc =
+    SnapshotProtos.SnapshotDescription snapshotDesc =
         SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
     final TableName table = TableName.valueOf(snapshotDesc.getTable());
 
@@ -476,7 +476,7 @@ public final class SnapshotTestingUtils {
 
     public static class SnapshotBuilder {
       private final RegionData[] tableRegions;
-      private final HBaseProtos.SnapshotDescription desc;
+      private final SnapshotProtos.SnapshotDescription desc;
       private final HTableDescriptor htd;
       private final Configuration conf;
       private final FileSystem fs;
@@ -486,7 +486,7 @@ public final class SnapshotTestingUtils {
 
       public SnapshotBuilder(final Configuration conf, final FileSystem fs,
           final Path rootDir, final HTableDescriptor htd,
-          final HBaseProtos.SnapshotDescription desc, final RegionData[] tableRegions)
+          final SnapshotProtos.SnapshotDescription desc, final RegionData[] tableRegions)
           throws IOException {
         this.fs = fs;
         this.conf = conf;
@@ -503,7 +503,7 @@ public final class SnapshotTestingUtils {
         return this.htd;
       }
 
-      public HBaseProtos.SnapshotDescription getSnapshotDescription() {
+      public SnapshotProtos.SnapshotDescription getSnapshotDescription() {
         return this.desc;
       }
 
@@ -527,7 +527,7 @@ public final class SnapshotTestingUtils {
                           .build());
       }
 
-      private Path[] addRegion(final HBaseProtos.SnapshotDescription desc) throws IOException {
+      private Path[] addRegion(final SnapshotProtos.SnapshotDescription desc) throws IOException {
         if (this.snapshotted == tableRegions.length) {
           throw new UnsupportedOperationException("No more regions in the table");
         }
@@ -668,7 +668,7 @@ public final class SnapshotTestingUtils {
       HTableDescriptor htd = createHtd(tableName);
       RegionData[] regions = createTable(htd, numRegions);
 
-      HBaseProtos.SnapshotDescription desc = HBaseProtos.SnapshotDescription.newBuilder()
+      SnapshotProtos.SnapshotDescription desc = SnapshotProtos.SnapshotDescription.newBuilder()
         .setTable(htd.getNameAsString())
         .setName(snapshotName)
         .setVersion(version)
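
Reviewer note: the hunk above shows that SnapshotDescription keeps the same
builder API after its move from HBaseProtos to SnapshotProtos; only the owning
proto file changes. A minimal sketch of building a description against the
relocated class (the snapshot name, table name, and FLUSH type below are
illustrative values, not taken from the patch):

    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;

    // Same builder methods as before the move:
    // setName / setTable / setType / setVersion.
    SnapshotProtos.SnapshotDescription desc =
        SnapshotProtos.SnapshotDescription.newBuilder()
            .setName("exampleSnapshot")   // illustrative snapshot name
            .setTable("exampleTable")     // illustrative table name
            .setType(SnapshotProtos.SnapshotDescription.Type.FLUSH)
            .build();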

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index cc055a5..b875320 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
index 86405dc..2882120 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -274,9 +275,9 @@ public class TestFlushSnapshotFromClient {
 
   @Test
   public void testAsyncFlushSnapshot() throws Exception {
-    HBaseProtos.SnapshotDescription snapshot = HBaseProtos.SnapshotDescription.newBuilder()
+    SnapshotProtos.SnapshotDescription snapshot = SnapshotProtos.SnapshotDescription.newBuilder()
         .setName("asyncSnapshot").setTable(TABLE_NAME.getNameAsString())
-        .setType(HBaseProtos.SnapshotDescription.Type.FLUSH).build();
+        .setType(SnapshotProtos.SnapshotDescription.Type.FLUSH).build();
 
     // take the snapshot async
     admin.takeSnapshotAsync(
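
The hunk above ends mid-call; the test's pattern is to fire the snapshot
without blocking and then poll the master for completion. A minimal sketch of
the polling half, using the waitForSnapshotToComplete helper whose updated
SnapshotProtos signature appears earlier in this patch (master is assumed to
be the test's HMaster handle, and the 200 ms sleep interval is illustrative):

    // Poll the master with IsSnapshotDoneRequest until the snapshot
    // completes, sleeping 200 ms between probes.
    SnapshotTestingUtils.waitForSnapshotToComplete(master, snapshot, 200);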

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index 305e7d4..4b684e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -31,12 +31,12 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils.SnapshotMock;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
index c49595e..bd9d5dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.TestTableName;
 import org.junit.After;

http://git-wip-us.apache.org/repos/asf/hbase/blob/37dd8ff7/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
index cd87993..038ee8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
@@ -69,7 +69,7 @@ public class TestSnapshotDescriptionUtils {
   private static final Log LOG = LogFactory.getLog(TestSnapshotDescriptionUtils.class);
 
   @Test
-  public void testValidateMissingTableName() {
+  public void testValidateMissingTableName() throws IOException {
     Configuration conf = new Configuration(false);
     try {
       SnapshotDescriptionUtils.validate(SnapshotDescription.newBuilder().setName("fail").build(),
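
The hunk ends inside the try block, but the added throws IOException on the
test method indicates that SnapshotDescriptionUtils.validate now declares a
checked IOException. A sketch of how the truncated block plausibly completes,
assuming the usual fail/catch test idiom (the expected exception type is an
assumption, not shown in the hunk):

    try {
      SnapshotDescriptionUtils.validate(
          SnapshotDescription.newBuilder().setName("fail").build(), conf);
      fail("A snapshot description without a table name should not validate");
    } catch (IllegalArgumentException e) {
      // expected: the description does not name the table it captures
    }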