Posted to commits@hbase.apache.org by te...@apache.org on 2016/10/05 23:30:13 UTC

[07/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)
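
This part of the patch regenerates BackupProtos.java: the FullTableBackupState and IncrementalTableBackupState enums and the SnapshotTableStateData message are removed (they survive only as a comment on BackupType), and a new BackupImage message is introduced with backup_id, backup_type, root_dir, a repeated table_list, start_ts, complete_ts and a repeated ancestors field. As a rough illustration only, and not part of the patch, the sketch below shows how caller code might build and round-trip the new message; it assumes the standard setters/adders that protobuf 2.5 generates for these fields (setBackupId, addTableList, etc., only some of which appear in this excerpt), and every literal value (backup id, root dir, table name, timestamps) is made up for the example.

  // Illustrative sketch only: assembling and round-tripping the new BackupImage
  // message whose generated code is added by this patch. Field names and types
  // come from the diff below; all concrete values here are invented.
  import com.google.protobuf.ByteString;
  import org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage;
  import org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType;
  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName;

  public class BackupImageExample {
    public static void main(String[] args) throws Exception {
      // hbase.pb.TableName is a namespace/qualifier pair of bytes.
      TableName table = TableName.newBuilder()
          .setNamespace(ByteString.copyFromUtf8("default"))
          .setQualifier(ByteString.copyFromUtf8("usertable"))
          .build();

      long now = System.currentTimeMillis();
      BackupImage image = BackupImage.newBuilder()
          .setBackupId("backup_1475710000000")   // required string backup_id = 1
          .setBackupType(BackupType.FULL)        // required .hbase.pb.BackupType backup_type = 2
          .setRootDir("hdfs://nn:8020/backup")   // required string root_dir = 3
          .addTableList(table)                   // repeated .hbase.pb.TableName table_list = 4
          .setStartTs(now)                       // required uint64 start_ts = 5
          .setCompleteTs(now)                    // required uint64 complete_ts = 6
          .build();                              // ancestors (field 7) left empty for a full backup

      // Round-trip through the wire format, as backup metadata persistence would.
      byte[] bytes = image.toByteArray();
      BackupImage parsed = BackupImage.parseFrom(bytes);
      System.out.println(parsed.getBackupId() + " " + parsed.getBackupType());
    }
  }

An incremental image would differ only in setting BackupType.INCREMENTAL and carrying one or more addAncestors(...) entries pointing at the images it depends on.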

http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
index 4699c81..c5220cc 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
@@ -9,198 +9,27 @@ public final class BackupProtos {
       com.google.protobuf.ExtensionRegistry registry) {
   }
   /**
-   * Protobuf enum {@code hbase.pb.FullTableBackupState}
-   */
-  public enum FullTableBackupState
-      implements com.google.protobuf.ProtocolMessageEnum {
-    /**
-     * <code>PRE_SNAPSHOT_TABLE = 1;</code>
-     */
-    PRE_SNAPSHOT_TABLE(0, 1),
-    /**
-     * <code>SNAPSHOT_TABLES = 2;</code>
-     */
-    SNAPSHOT_TABLES(1, 2),
-    /**
-     * <code>SNAPSHOT_COPY = 3;</code>
-     */
-    SNAPSHOT_COPY(2, 3),
-    /**
-     * <code>BACKUP_COMPLETE = 4;</code>
-     */
-    BACKUP_COMPLETE(3, 4),
-    ;
-
-    /**
-     * <code>PRE_SNAPSHOT_TABLE = 1;</code>
-     */
-    public static final int PRE_SNAPSHOT_TABLE_VALUE = 1;
-    /**
-     * <code>SNAPSHOT_TABLES = 2;</code>
-     */
-    public static final int SNAPSHOT_TABLES_VALUE = 2;
-    /**
-     * <code>SNAPSHOT_COPY = 3;</code>
-     */
-    public static final int SNAPSHOT_COPY_VALUE = 3;
-    /**
-     * <code>BACKUP_COMPLETE = 4;</code>
-     */
-    public static final int BACKUP_COMPLETE_VALUE = 4;
-
-
-    public final int getNumber() { return value; }
-
-    public static FullTableBackupState valueOf(int value) {
-      switch (value) {
-        case 1: return PRE_SNAPSHOT_TABLE;
-        case 2: return SNAPSHOT_TABLES;
-        case 3: return SNAPSHOT_COPY;
-        case 4: return BACKUP_COMPLETE;
-        default: return null;
-      }
-    }
-
-    public static com.google.protobuf.Internal.EnumLiteMap<FullTableBackupState>
-        internalGetValueMap() {
-      return internalValueMap;
-    }
-    private static com.google.protobuf.Internal.EnumLiteMap<FullTableBackupState>
-        internalValueMap =
-          new com.google.protobuf.Internal.EnumLiteMap<FullTableBackupState>() {
-            public FullTableBackupState findValueByNumber(int number) {
-              return FullTableBackupState.valueOf(number);
-            }
-          };
-
-    public final com.google.protobuf.Descriptors.EnumValueDescriptor
-        getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
-    }
-    public final com.google.protobuf.Descriptors.EnumDescriptor
-        getDescriptorForType() {
-      return getDescriptor();
-    }
-    public static final com.google.protobuf.Descriptors.EnumDescriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(0);
-    }
-
-    private static final FullTableBackupState[] VALUES = values();
-
-    public static FullTableBackupState valueOf(
-        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-      if (desc.getType() != getDescriptor()) {
-        throw new java.lang.IllegalArgumentException(
-          "EnumValueDescriptor is not for this type.");
-      }
-      return VALUES[desc.getIndex()];
-    }
-
-    private final int index;
-    private final int value;
-
-    private FullTableBackupState(int index, int value) {
-      this.index = index;
-      this.value = value;
-    }
-
-    // @@protoc_insertion_point(enum_scope:hbase.pb.FullTableBackupState)
-  }
-
-  /**
-   * Protobuf enum {@code hbase.pb.IncrementalTableBackupState}
-   */
-  public enum IncrementalTableBackupState
-      implements com.google.protobuf.ProtocolMessageEnum {
-    /**
-     * <code>PREPARE_INCREMENTAL = 1;</code>
-     */
-    PREPARE_INCREMENTAL(0, 1),
-    /**
-     * <code>INCREMENTAL_COPY = 2;</code>
-     */
-    INCREMENTAL_COPY(1, 2),
-    /**
-     * <code>INCR_BACKUP_COMPLETE = 3;</code>
-     */
-    INCR_BACKUP_COMPLETE(2, 3),
-    ;
-
-    /**
-     * <code>PREPARE_INCREMENTAL = 1;</code>
-     */
-    public static final int PREPARE_INCREMENTAL_VALUE = 1;
-    /**
-     * <code>INCREMENTAL_COPY = 2;</code>
-     */
-    public static final int INCREMENTAL_COPY_VALUE = 2;
-    /**
-     * <code>INCR_BACKUP_COMPLETE = 3;</code>
-     */
-    public static final int INCR_BACKUP_COMPLETE_VALUE = 3;
-
-
-    public final int getNumber() { return value; }
-
-    public static IncrementalTableBackupState valueOf(int value) {
-      switch (value) {
-        case 1: return PREPARE_INCREMENTAL;
-        case 2: return INCREMENTAL_COPY;
-        case 3: return INCR_BACKUP_COMPLETE;
-        default: return null;
-      }
-    }
-
-    public static com.google.protobuf.Internal.EnumLiteMap<IncrementalTableBackupState>
-        internalGetValueMap() {
-      return internalValueMap;
-    }
-    private static com.google.protobuf.Internal.EnumLiteMap<IncrementalTableBackupState>
-        internalValueMap =
-          new com.google.protobuf.Internal.EnumLiteMap<IncrementalTableBackupState>() {
-            public IncrementalTableBackupState findValueByNumber(int number) {
-              return IncrementalTableBackupState.valueOf(number);
-            }
-          };
-
-    public final com.google.protobuf.Descriptors.EnumValueDescriptor
-        getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
-    }
-    public final com.google.protobuf.Descriptors.EnumDescriptor
-        getDescriptorForType() {
-      return getDescriptor();
-    }
-    public static final com.google.protobuf.Descriptors.EnumDescriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(1);
-    }
-
-    private static final IncrementalTableBackupState[] VALUES = values();
-
-    public static IncrementalTableBackupState valueOf(
-        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-      if (desc.getType() != getDescriptor()) {
-        throw new java.lang.IllegalArgumentException(
-          "EnumValueDescriptor is not for this type.");
-      }
-      return VALUES[desc.getIndex()];
-    }
-
-    private final int index;
-    private final int value;
-
-    private IncrementalTableBackupState(int index, int value) {
-      this.index = index;
-      this.value = value;
-    }
-
-    // @@protoc_insertion_point(enum_scope:hbase.pb.IncrementalTableBackupState)
-  }
-
-  /**
    * Protobuf enum {@code hbase.pb.BackupType}
+   *
+   * <pre>
+   *enum FullTableBackupState {
+   *PRE_SNAPSHOT_TABLE = 1;
+   *SNAPSHOT_TABLES = 2;
+   *SNAPSHOT_COPY = 3;
+   *BACKUP_COMPLETE = 4;
+   *}
+   *
+   *enum IncrementalTableBackupState {
+   *PREPARE_INCREMENTAL = 1;
+   *INCREMENTAL_COPY = 2;
+   *INCR_BACKUP_COMPLETE = 3;
+   *}
+   *
+   *message SnapshotTableStateData {
+   *required TableName table = 1;
+   *required string snapshotName = 2;
+   *}
+   * </pre>
    */
   public enum BackupType
       implements com.google.protobuf.ProtocolMessageEnum {
@@ -256,7 +85,7 @@ public final class BackupProtos {
     }
     public static final com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(2);
+      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(0);
     }
 
     private static final BackupType[] VALUES = values();
@@ -281,57 +110,138 @@ public final class BackupProtos {
     // @@protoc_insertion_point(enum_scope:hbase.pb.BackupType)
   }
 
-  public interface SnapshotTableStateDataOrBuilder
+  public interface BackupImageOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required .hbase.pb.TableName table = 1;
+    // required string backup_id = 1;
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    boolean hasTable();
+    boolean hasBackupId();
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTable();
+    java.lang.String getBackupId();
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableOrBuilder();
+    com.google.protobuf.ByteString
+        getBackupIdBytes();
+
+    // required .hbase.pb.BackupType backup_type = 2;
+    /**
+     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
+     */
+    boolean hasBackupType();
+    /**
+     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType getBackupType();
 
-    // required string snapshotName = 2;
+    // required string root_dir = 3;
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required string root_dir = 3;</code>
      */
-    boolean hasSnapshotName();
+    boolean hasRootDir();
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required string root_dir = 3;</code>
      */
-    java.lang.String getSnapshotName();
+    java.lang.String getRootDir();
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required string root_dir = 3;</code>
      */
     com.google.protobuf.ByteString
-        getSnapshotNameBytes();
+        getRootDirBytes();
+
+    // repeated .hbase.pb.TableName table_list = 4;
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> 
+        getTableListList();
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableList(int index);
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    int getTableListCount();
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
+        getTableListOrBuilderList();
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableListOrBuilder(
+        int index);
+
+    // required uint64 start_ts = 5;
+    /**
+     * <code>required uint64 start_ts = 5;</code>
+     */
+    boolean hasStartTs();
+    /**
+     * <code>required uint64 start_ts = 5;</code>
+     */
+    long getStartTs();
+
+    // required uint64 complete_ts = 6;
+    /**
+     * <code>required uint64 complete_ts = 6;</code>
+     */
+    boolean hasCompleteTs();
+    /**
+     * <code>required uint64 complete_ts = 6;</code>
+     */
+    long getCompleteTs();
+
+    // repeated .hbase.pb.BackupImage ancestors = 7;
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> 
+        getAncestorsList();
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage getAncestors(int index);
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    int getAncestorsCount();
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder> 
+        getAncestorsOrBuilderList();
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder getAncestorsOrBuilder(
+        int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.SnapshotTableStateData}
+   * Protobuf type {@code hbase.pb.BackupImage}
    */
-  public static final class SnapshotTableStateData extends
+  public static final class BackupImage extends
       com.google.protobuf.GeneratedMessage
-      implements SnapshotTableStateDataOrBuilder {
-    // Use SnapshotTableStateData.newBuilder() to construct.
-    private SnapshotTableStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      implements BackupImageOrBuilder {
+    // Use BackupImage.newBuilder() to construct.
+    private BackupImage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
       this.unknownFields = builder.getUnknownFields();
     }
-    private SnapshotTableStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private BackupImage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-    private static final SnapshotTableStateData defaultInstance;
-    public static SnapshotTableStateData getDefaultInstance() {
+    private static final BackupImage defaultInstance;
+    public static BackupImage getDefaultInstance() {
       return defaultInstance;
     }
 
-    public SnapshotTableStateData getDefaultInstanceForType() {
+    public BackupImage getDefaultInstanceForType() {
       return defaultInstance;
     }
 
@@ -341,7 +251,7 @@ public final class BackupProtos {
         getUnknownFields() {
       return this.unknownFields;
     }
-    private SnapshotTableStateData(
+    private BackupImage(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -365,21 +275,50 @@ public final class BackupProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000001) == 0x00000001)) {
-                subBuilder = table_.toBuilder();
-              }
-              table_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
-              if (subBuilder != null) {
-                subBuilder.mergeFrom(table_);
-                table_ = subBuilder.buildPartial();
-              }
               bitField0_ |= 0x00000001;
+              backupId_ = input.readBytes();
               break;
             }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              snapshotName_ = input.readBytes();
+            case 16: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType value = org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(2, rawValue);
+              } else {
+                bitField0_ |= 0x00000002;
+                backupType_ = value;
+              }
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              rootDir_ = input.readBytes();
+              break;
+            }
+            case 34: {
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                tableList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
+                mutable_bitField0_ |= 0x00000008;
+              }
+              tableList_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
+              break;
+            }
+            case 40: {
+              bitField0_ |= 0x00000008;
+              startTs_ = input.readUInt64();
+              break;
+            }
+            case 48: {
+              bitField0_ |= 0x00000010;
+              completeTs_ = input.readUInt64();
+              break;
+            }
+            case 58: {
+              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
+                ancestors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage>();
+                mutable_bitField0_ |= 0x00000040;
+              }
+              ancestors_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.PARSER, extensionRegistry));
               break;
             }
           }
@@ -390,74 +329,117 @@ public final class BackupProtos {
         throw new com.google.protobuf.InvalidProtocolBufferException(
             e.getMessage()).setUnfinishedMessage(this);
       } finally {
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+          tableList_ = java.util.Collections.unmodifiableList(tableList_);
+        }
+        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
+          ancestors_ = java.util.Collections.unmodifiableList(ancestors_);
+        }
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
       }
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_SnapshotTableStateData_descriptor;
+      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_SnapshotTableStateData_fieldAccessorTable
+      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.class, org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.Builder.class);
+              org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.class, org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<SnapshotTableStateData> PARSER =
-        new com.google.protobuf.AbstractParser<SnapshotTableStateData>() {
-      public SnapshotTableStateData parsePartialFrom(
+    public static com.google.protobuf.Parser<BackupImage> PARSER =
+        new com.google.protobuf.AbstractParser<BackupImage>() {
+      public BackupImage parsePartialFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return new SnapshotTableStateData(input, extensionRegistry);
+        return new BackupImage(input, extensionRegistry);
       }
     };
 
     @java.lang.Override
-    public com.google.protobuf.Parser<SnapshotTableStateData> getParserForType() {
+    public com.google.protobuf.Parser<BackupImage> getParserForType() {
       return PARSER;
     }
 
     private int bitField0_;
-    // required .hbase.pb.TableName table = 1;
-    public static final int TABLE_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName table_;
+    // required string backup_id = 1;
+    public static final int BACKUP_ID_FIELD_NUMBER = 1;
+    private java.lang.Object backupId_;
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    public boolean hasTable() {
+    public boolean hasBackupId() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTable() {
-      return table_;
+    public java.lang.String getBackupId() {
+      java.lang.Object ref = backupId_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          backupId_ = s;
+        }
+        return s;
+      }
     }
     /**
-     * <code>required .hbase.pb.TableName table = 1;</code>
+     * <code>required string backup_id = 1;</code>
      */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableOrBuilder() {
-      return table_;
+    public com.google.protobuf.ByteString
+        getBackupIdBytes() {
+      java.lang.Object ref = backupId_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        backupId_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
     }
 
-    // required string snapshotName = 2;
-    public static final int SNAPSHOTNAME_FIELD_NUMBER = 2;
-    private java.lang.Object snapshotName_;
+    // required .hbase.pb.BackupType backup_type = 2;
+    public static final int BACKUP_TYPE_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType backupType_;
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
      */
-    public boolean hasSnapshotName() {
+    public boolean hasBackupType() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType getBackupType() {
+      return backupType_;
+    }
+
+    // required string root_dir = 3;
+    public static final int ROOT_DIR_FIELD_NUMBER = 3;
+    private java.lang.Object rootDir_;
+    /**
+     * <code>required string root_dir = 3;</code>
+     */
+    public boolean hasRootDir() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>required string root_dir = 3;</code>
      */
-    public java.lang.String getSnapshotName() {
-      java.lang.Object ref = snapshotName_;
+    public java.lang.String getRootDir() {
+      java.lang.Object ref = rootDir_;
       if (ref instanceof java.lang.String) {
         return (java.lang.String) ref;
       } else {
@@ -465,61 +447,205 @@ public final class BackupProtos {
             (com.google.protobuf.ByteString) ref;
         java.lang.String s = bs.toStringUtf8();
         if (bs.isValidUtf8()) {
-          snapshotName_ = s;
+          rootDir_ = s;
         }
         return s;
       }
     }
     /**
-     * <code>required string snapshotName = 2;</code>
+     * <code>required string root_dir = 3;</code>
      */
     public com.google.protobuf.ByteString
-        getSnapshotNameBytes() {
-      java.lang.Object ref = snapshotName_;
+        getRootDirBytes() {
+      java.lang.Object ref = rootDir_;
       if (ref instanceof java.lang.String) {
         com.google.protobuf.ByteString b = 
             com.google.protobuf.ByteString.copyFromUtf8(
                 (java.lang.String) ref);
-        snapshotName_ = b;
+        rootDir_ = b;
         return b;
       } else {
         return (com.google.protobuf.ByteString) ref;
       }
     }
 
+    // repeated .hbase.pb.TableName table_list = 4;
+    public static final int TABLE_LIST_FIELD_NUMBER = 4;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableList_;
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableListList() {
+      return tableList_;
+    }
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
+        getTableListOrBuilderList() {
+      return tableList_;
+    }
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    public int getTableListCount() {
+      return tableList_.size();
+    }
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableList(int index) {
+      return tableList_.get(index);
+    }
+    /**
+     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableListOrBuilder(
+        int index) {
+      return tableList_.get(index);
+    }
+
+    // required uint64 start_ts = 5;
+    public static final int START_TS_FIELD_NUMBER = 5;
+    private long startTs_;
+    /**
+     * <code>required uint64 start_ts = 5;</code>
+     */
+    public boolean hasStartTs() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>required uint64 start_ts = 5;</code>
+     */
+    public long getStartTs() {
+      return startTs_;
+    }
+
+    // required uint64 complete_ts = 6;
+    public static final int COMPLETE_TS_FIELD_NUMBER = 6;
+    private long completeTs_;
+    /**
+     * <code>required uint64 complete_ts = 6;</code>
+     */
+    public boolean hasCompleteTs() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>required uint64 complete_ts = 6;</code>
+     */
+    public long getCompleteTs() {
+      return completeTs_;
+    }
+
+    // repeated .hbase.pb.BackupImage ancestors = 7;
+    public static final int ANCESTORS_FIELD_NUMBER = 7;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> ancestors_;
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> getAncestorsList() {
+      return ancestors_;
+    }
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder> 
+        getAncestorsOrBuilderList() {
+      return ancestors_;
+    }
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    public int getAncestorsCount() {
+      return ancestors_.size();
+    }
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage getAncestors(int index) {
+      return ancestors_.get(index);
+    }
+    /**
+     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder getAncestorsOrBuilder(
+        int index) {
+      return ancestors_.get(index);
+    }
+
     private void initFields() {
-      table_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      snapshotName_ = "";
+      backupId_ = "";
+      backupType_ = org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType.FULL;
+      rootDir_ = "";
+      tableList_ = java.util.Collections.emptyList();
+      startTs_ = 0L;
+      completeTs_ = 0L;
+      ancestors_ = java.util.Collections.emptyList();
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
 
-      if (!hasTable()) {
+      if (!hasBackupId()) {
         memoizedIsInitialized = 0;
         return false;
       }
-      if (!hasSnapshotName()) {
+      if (!hasBackupType()) {
         memoizedIsInitialized = 0;
         return false;
       }
-      if (!getTable().isInitialized()) {
+      if (!hasRootDir()) {
         memoizedIsInitialized = 0;
         return false;
       }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeMessage(1, table_);
+      if (!hasStartTs()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasCompleteTs()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      for (int i = 0; i < getTableListCount(); i++) {
+        if (!getTableList(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      for (int i = 0; i < getAncestorsCount(); i++) {
+        if (!getAncestors(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getBackupIdBytes());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(2, getSnapshotNameBytes());
+        output.writeEnum(2, backupType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, getRootDirBytes());
+      }
+      for (int i = 0; i < tableList_.size(); i++) {
+        output.writeMessage(4, tableList_.get(i));
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeUInt64(5, startTs_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeUInt64(6, completeTs_);
+      }
+      for (int i = 0; i < ancestors_.size(); i++) {
+        output.writeMessage(7, ancestors_.get(i));
       }
       getUnknownFields().writeTo(output);
     }
@@ -532,11 +658,31 @@ public final class BackupProtos {
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, table_);
+          .computeBytesSize(1, getBackupIdBytes());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(2, getSnapshotNameBytes());
+          .computeEnumSize(2, backupType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, getRootDirBytes());
+      }
+      for (int i = 0; i < tableList_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, tableList_.get(i));
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(5, startTs_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(6, completeTs_);
+      }
+      for (int i = 0; i < ancestors_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(7, ancestors_.get(i));
       }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
@@ -555,22 +701,41 @@ public final class BackupProtos {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData)) {
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage)) {
         return super.equals(obj);
       }
-      org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData other = (org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData) obj;
+      org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage other = (org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage) obj;
 
       boolean result = true;
-      result = result && (hasTable() == other.hasTable());
-      if (hasTable()) {
-        result = result && getTable()
-            .equals(other.getTable());
+      result = result && (hasBackupId() == other.hasBackupId());
+      if (hasBackupId()) {
+        result = result && getBackupId()
+            .equals(other.getBackupId());
+      }
+      result = result && (hasBackupType() == other.hasBackupType());
+      if (hasBackupType()) {
+        result = result &&
+            (getBackupType() == other.getBackupType());
       }
-      result = result && (hasSnapshotName() == other.hasSnapshotName());
-      if (hasSnapshotName()) {
-        result = result && getSnapshotName()
-            .equals(other.getSnapshotName());
+      result = result && (hasRootDir() == other.hasRootDir());
+      if (hasRootDir()) {
+        result = result && getRootDir()
+            .equals(other.getRootDir());
+      }
+      result = result && getTableListList()
+          .equals(other.getTableListList());
+      result = result && (hasStartTs() == other.hasStartTs());
+      if (hasStartTs()) {
+        result = result && (getStartTs()
+            == other.getStartTs());
+      }
+      result = result && (hasCompleteTs() == other.hasCompleteTs());
+      if (hasCompleteTs()) {
+        result = result && (getCompleteTs()
+            == other.getCompleteTs());
       }
+      result = result && getAncestorsList()
+          .equals(other.getAncestorsList());
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -584,66 +749,86 @@ public final class BackupProtos {
       }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasTable()) {
-        hash = (37 * hash) + TABLE_FIELD_NUMBER;
-        hash = (53 * hash) + getTable().hashCode();
+      if (hasBackupId()) {
+        hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getBackupId().hashCode();
+      }
+      if (hasBackupType()) {
+        hash = (37 * hash) + BACKUP_TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + hashEnum(getBackupType());
+      }
+      if (hasRootDir()) {
+        hash = (37 * hash) + ROOT_DIR_FIELD_NUMBER;
+        hash = (53 * hash) + getRootDir().hashCode();
       }
-      if (hasSnapshotName()) {
-        hash = (37 * hash) + SNAPSHOTNAME_FIELD_NUMBER;
-        hash = (53 * hash) + getSnapshotName().hashCode();
+      if (getTableListCount() > 0) {
+        hash = (37 * hash) + TABLE_LIST_FIELD_NUMBER;
+        hash = (53 * hash) + getTableListList().hashCode();
+      }
+      if (hasStartTs()) {
+        hash = (37 * hash) + START_TS_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getStartTs());
+      }
+      if (hasCompleteTs()) {
+        hash = (37 * hash) + COMPLETE_TS_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getCompleteTs());
+      }
+      if (getAncestorsCount() > 0) {
+        hash = (37 * hash) + ANCESTORS_FIELD_NUMBER;
+        hash = (53 * hash) + getAncestorsList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
     }
 
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(byte[] data)
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseDelimitedFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parseFrom(
+    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -652,7 +837,7 @@ public final class BackupProtos {
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData prototype) {
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -664,24 +849,24 @@ public final class BackupProtos {
       return builder;
     }
     /**
-     * Protobuf type {@code hbase.pb.SnapshotTableStateData}
+     * Protobuf type {@code hbase.pb.BackupImage}
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateDataOrBuilder {
+       implements org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_SnapshotTableStateData_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_SnapshotTableStateData_fieldAccessorTable
+        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.class, org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.Builder.class);
+                org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.class, org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.Builder.class);
       }
 
-      // Construct using org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.newBuilder()
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -693,7 +878,8 @@ public final class BackupProtos {
       }
       private void maybeForceBuilderInitialization() {
         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getTableFieldBuilder();
+          getTableListFieldBuilder();
+          getAncestorsFieldBuilder();
         }
       }
       private static Builder create() {
@@ -702,14 +888,28 @@ public final class BackupProtos {
 
       public Builder clear() {
         super.clear();
-        if (tableBuilder_ == null) {
-          table_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-        } else {
-          tableBuilder_.clear();
-        }
+        backupId_ = "";
         bitField0_ = (bitField0_ & ~0x00000001);
-        snapshotName_ = "";
+        backupType_ = org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType.FULL;
         bitField0_ = (bitField0_ & ~0x00000002);
+        rootDir_ = "";
+        bitField0_ = (bitField0_ & ~0x00000004);
+        if (tableListBuilder_ == null) {
+          tableList_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000008);
+        } else {
+          tableListBuilder_.clear();
+        }
+        startTs_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        completeTs_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000020);
+        if (ancestorsBuilder_ == null) {
+          ancestors_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000040);
+        } else {
+          ancestorsBuilder_.clear();
+        }
         return this;
       }
 
@@ -719,2731 +919,997 @@ public final class BackupProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_SnapshotTableStateData_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_descriptor;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.getDefaultInstance();
+      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData build() {
-        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData result = buildPartial();
+      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage build() {
+        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData buildPartial() {
-        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData result = new org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData(this);
+      public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage result = new org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
           to_bitField0_ |= 0x00000001;
         }
-        if (tableBuilder_ == null) {
-          result.table_ = table_;
-        } else {
-          result.table_ = tableBuilder_.build();
-        }
+        result.backupId_ = backupId_;
         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
           to_bitField0_ |= 0x00000002;
         }
-        result.snapshotName_ = snapshotName_;
+        result.backupType_ = backupType_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.rootDir_ = rootDir_;
+        if (tableListBuilder_ == null) {
+          if (((bitField0_ & 0x00000008) == 0x00000008)) {
+            tableList_ = java.util.Collections.unmodifiableList(tableList_);
+            bitField0_ = (bitField0_ & ~0x00000008);
+          }
+          result.tableList_ = tableList_;
+        } else {
+          result.tableList_ = tableListBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.startTs_ = startTs_;
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.completeTs_ = completeTs_;
+        if (ancestorsBuilder_ == null) {
+          if (((bitField0_ & 0x00000040) == 0x00000040)) {
+            ancestors_ = java.util.Collections.unmodifiableList(ancestors_);
+            bitField0_ = (bitField0_ & ~0x00000040);
+          }
+          result.ancestors_ = ancestors_;
+        } else {
+          result.ancestors_ = ancestorsBuilder_.build();
+        }
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData) {
-          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData)other);
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData other) {
-        if (other == org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData.getDefaultInstance()) return this;
-        if (other.hasTable()) {
-          mergeTable(other.getTable());
-        }
-        if (other.hasSnapshotName()) {
-          bitField0_ |= 0x00000002;
-          snapshotName_ = other.snapshotName_;
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.getDefaultInstance()) return this;
+        if (other.hasBackupId()) {
+          bitField0_ |= 0x00000001;
+          backupId_ = other.backupId_;
           onChanged();
         }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasTable()) {
-          
-          return false;
-        }
-        if (!hasSnapshotName()) {
-          
-          return false;
+        if (other.hasBackupType()) {
+          setBackupType(other.getBackupType());
         }
-        if (!getTable().isInitialized()) {
-          
-          return false;
+        if (other.hasRootDir()) {
+          bitField0_ |= 0x00000004;
+          rootDir_ = other.rootDir_;
+          onChanged();
         }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.BackupProtos.SnapshotTableStateData) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
+        if (tableListBuilder_ == null) {
+          if (!other.tableList_.isEmpty()) {
+            if (tableList_.isEmpty()) {
+              tableList_ = other.tableList_;
+              bitField0_ = (bitField0_ & ~0x00000008);
+            } else {
+              ensureTableListIsMutable();
+              tableList_.addAll(other.tableList_);
+            }
+            onChanged();
           }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // required .hbase.pb.TableName table = 1;
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName table_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableBuilder_;
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public boolean hasTable() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTable() {
-        if (tableBuilder_ == null) {
-          return table_;
         } else {
-          return tableBuilder_.getMessage();
-        }
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public Builder setTable(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
-        if (tableBuilder_ == null) {
-          if (value == null) {
-            throw new NullPointerException();
+          if (!other.tableList_.isEmpty()) {
+            if (tableListBuilder_.isEmpty()) {
+              tableListBuilder_.dispose();
+              tableListBuilder_ = null;
+              tableList_ = other.tableList_;
+              bitField0_ = (bitField0_ & ~0x00000008);
+              tableListBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getTableListFieldBuilder() : null;
+            } else {
+              tableListBuilder_.addAllMessages(other.tableList_);
+            }
           }
-          table_ = value;
-          onChanged();
-        } else {
-          tableBuilder_.setMessage(value);
         }
-        bitField0_ |= 0x00000001;
-        return this;
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public Builder setTable(
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
-        if (tableBuilder_ == null) {
-          table_ = builderForValue.build();
-          onChanged();
-        } else {
-          tableBuilder_.setMessage(builderForValue.build());
+        if (other.hasStartTs()) {
+          setStartTs(other.getStartTs());
         }
-        bitField0_ |= 0x00000001;
-        return this;
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public Builder mergeTable(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
-        if (tableBuilder_ == null) {
-          if (((bitField0_ & 0x00000001) == 0x00000001) &&
-              table_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
-            table_ =
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(table_).mergeFrom(value).buildPartial();
-          } else {
-            table_ = value;
+        if (other.hasCompleteTs()) {
+          setCompleteTs(other.getCompleteTs());
+        }
+        if (ancestorsBuilder_ == null) {
+          if (!other.ancestors_.isEmpty()) {
+            if (ancestors_.isEmpty()) {
+              ancestors_ = other.ancestors_;
+              bitField0_ = (bitField0_ & ~0x00000040);
+            } else {
+              ensureAncestorsIsMutable();
+              ancestors_.addAll(other.ancestors_);
+            }
+            onChanged();
           }
-          onChanged();
         } else {
-          tableBuilder_.mergeFrom(value);
+          if (!other.ancestors_.isEmpty()) {
+            if (ancestorsBuilder_.isEmpty()) {
+              ancestorsBuilder_.dispose();
+              ancestorsBuilder_ = null;
+              ancestors_ = other.ancestors_;
+              bitField0_ = (bitField0_ & ~0x00000040);
+              ancestorsBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getAncestorsFieldBuilder() : null;
+            } else {
+              ancestorsBuilder_.addAllMessages(other.ancestors_);
+            }
+          }
         }
-        bitField0_ |= 0x00000001;
+        this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public Builder clearTable() {
-        if (tableBuilder_ == null) {
-          table_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-          onChanged();
-        } else {
-          tableBuilder_.clear();
+
+      public final boolean isInitialized() {
+        if (!hasBackupId()) {
+          
+          return false;
         }
-        bitField0_ = (bitField0_ & ~0x00000001);
-        return this;
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableBuilder() {
-        bitField0_ |= 0x00000001;
-        onChanged();
-        return getTableFieldBuilder().getBuilder();
-      }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableOrBuilder() {
-        if (tableBuilder_ != null) {
-          return tableBuilder_.getMessageOrBuilder();
-        } else {
-          return table_;
+        if (!hasBackupType()) {
+          
+          return false;
+        }
+        if (!hasRootDir()) {
+          
+          return false;
+        }
+        if (!hasStartTs()) {
+          
+          return false;
+        }
+        if (!hasCompleteTs()) {
+          
+          return false;
+        }
+        for (int i = 0; i < getTableListCount(); i++) {
+          if (!getTableList(i).isInitialized()) {
+            
+            return false;
+          }
+        }
+        for (int i = 0; i < getAncestorsCount(); i++) {
+          if (!getAncestors(i).isInitialized()) {
+            
+            return false;
+          }
         }
+        return true;
       }
-      /**
-       * <code>required .hbase.pb.TableName table = 1;</code>
-       */
-      private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
-          getTableFieldBuilder() {
-        if (tableBuilder_ == null) {
-          tableBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
-                  table_,
-                  getParentForChildren(),
-                  isClean());
-          table_ = null;
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
         }
-        return tableBuilder_;
+        return this;
       }
+      private int bitField0_;
 
-      // required string snapshotName = 2;
-      private java.lang.Object snapshotName_ = "";
+      // required string backup_id = 1;
+      private java.lang.Object backupId_ = "";
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
-      public boolean hasSnapshotName() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
+      public boolean hasBackupId() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
-      public java.lang.String getSnapshotName() {
-        java.lang.Object ref = snapshotName_;
+      public java.lang.String getBackupId() {
+        java.lang.Object ref = backupId_;
         if (!(ref instanceof java.lang.String)) {
           java.lang.String s = ((com.google.protobuf.ByteString) ref)
               .toStringUtf8();
-          snapshotName_ = s;
+          backupId_ = s;
           return s;
         } else {
           return (java.lang.String) ref;
         }
       }
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
       public com.google.protobuf.ByteString
-          getSnapshotNameBytes() {
-        java.lang.Object ref = snapshotName_;
+          getBackupIdBytes() {
+        java.lang.Object ref = backupId_;
         if (ref instanceof String) {
           com.google.protobuf.ByteString b = 
               com.google.protobuf.ByteString.copyFromUtf8(
                   (java.lang.String) ref);
-          snapshotName_ = b;
+          backupId_ = b;
           return b;
         } else {
           return (com.google.protobuf.ByteString) ref;
         }
       }
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
-      public Builder setSnapshotName(
+      public Builder setBackupId(
           java.lang.String value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000002;
-        snapshotName_ = value;
+  bitField0_ |= 0x00000001;
+        backupId_ = value;
         onChanged();
         return this;
       }
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
-      public Builder clearSnapshotName() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        snapshotName_ = getDefaultInstance().getSnapshotName();
+      public Builder clearBackupId() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        backupId_ = getDefaultInstance().getBackupId();
         onChanged();
         return this;
       }
       /**
-       * <code>required string snapshotName = 2;</code>
+       * <code>required string backup_id = 1;</code>
        */
-      public Builder setSnapshotNameBytes(
+      public Builder setBackupIdBytes(
           com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000002;
-        snapshotName_ = value;
+  bitField0_ |= 0x00000001;
+        backupId_ = value;
         onChanged();
         return this;
       }
 
-      // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotTableStateData)
-    }
-
-    static {
-      defaultInstance = new SnapshotTableStateData(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotTableStateData)
-  }
-
-  public interface BackupImageOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required string backup_id = 1;
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    boolean hasBackupId();
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    java.lang.String getBackupId();
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getBackupIdBytes();
-
-    // required .hbase.pb.BackupType backup_type = 2;
-    /**
-     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
-     */
-    boolean hasBackupType();
-    /**
-     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
-     */
-    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType getBackupType();
-
-    // required string root_dir = 3;
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    boolean hasRootDir();
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    java.lang.String getRootDir();
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    com.google.protobuf.ByteString
-        getRootDirBytes();
-
-    // repeated .hbase.pb.TableName table_list = 4;
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> 
-        getTableListList();
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableList(int index);
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    int getTableListCount();
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
-        getTableListOrBuilderList();
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableListOrBuilder(
-        int index);
-
-    // required uint64 start_ts = 5;
-    /**
-     * <code>required uint64 start_ts = 5;</code>
-     */
-    boolean hasStartTs();
-    /**
-     * <code>required uint64 start_ts = 5;</code>
-     */
-    long getStartTs();
-
-    // required uint64 complete_ts = 6;
-    /**
-     * <code>required uint64 complete_ts = 6;</code>
-     */
-    boolean hasCompleteTs();
-    /**
-     * <code>required uint64 complete_ts = 6;</code>
-     */
-    long getCompleteTs();
-
-    // repeated .hbase.pb.BackupImage ancestors = 7;
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> 
-        getAncestorsList();
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage getAncestors(int index);
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    int getAncestorsCount();
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder> 
-        getAncestorsOrBuilderList();
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder getAncestorsOrBuilder(
-        int index);
-  }
-  /**
-   * Protobuf type {@code hbase.pb.BackupImage}
-   */
-  public static final class BackupImage extends
-      com.google.protobuf.GeneratedMessage
-      implements BackupImageOrBuilder {
-    // Use BackupImage.newBuilder() to construct.
-    private BackupImage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private BackupImage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final BackupImage defaultInstance;
-    public static BackupImage getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public BackupImage getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private BackupImage(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              backupId_ = input.readBytes();
-              break;
-            }
-            case 16: {
-              int rawValue = input.readEnum();
-              org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType value = org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(2, rawValue);
-              } else {
-                bitField0_ |= 0x00000002;
-                backupType_ = value;
-              }
-              break;
-            }
-            case 26: {
-              bitField0_ |= 0x00000004;
-              rootDir_ = input.readBytes();
-              break;
-            }
-            case 34: {
-              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
-                tableList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
-                mutable_bitField0_ |= 0x00000008;
-              }
-              tableList_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
-              break;
-            }
-            case 40: {
-              bitField0_ |= 0x00000008;
-              startTs_ = input.readUInt64();
-              break;
-            }
-            case 48: {
-              bitField0_ |= 0x00000010;
-              completeTs_ = input.readUInt64();
-              break;
-            }
-            case 58: {
-              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
-                ancestors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage>();
-                mutable_bitField0_ |= 0x00000040;
-              }
-              ancestors_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.PARSER, extensionRegistry));
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
-          tableList_ = java.util.Collections.unmodifiableList(tableList_);
-        }
-        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
-          ancestors_ = java.util.Collections.unmodifiableList(ancestors_);
-        }
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.BackupProtos.internal_static_hbase_pb_BackupImage_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.class, org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<BackupImage> PARSER =
-        new com.google.protobuf.AbstractParser<BackupImage>() {
-      public BackupImage parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new BackupImage(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<BackupImage> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // required string backup_id = 1;
-    public static final int BACKUP_ID_FIELD_NUMBER = 1;
-    private java.lang.Object backupId_;
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    public boolean hasBackupId() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    public java.lang.String getBackupId() {
-      java.lang.Object ref = backupId_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          backupId_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string backup_id = 1;</code>
-     */
-    public com.google.protobuf.ByteString
-        getBackupIdBytes() {
-      java.lang.Object ref = backupId_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        backupId_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // required .hbase.pb.BackupType backup_type = 2;
-    public static final int BACKUP_TYPE_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType backupType_;
-    /**
-     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
-     */
-    public boolean hasBackupType() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <code>required .hbase.pb.BackupType backup_type = 2;</code>
-     */
-    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType getBackupType() {
-      return backupType_;
-    }
-
-    // required string root_dir = 3;
-    public static final int ROOT_DIR_FIELD_NUMBER = 3;
-    private java.lang.Object rootDir_;
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    public boolean hasRootDir() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    public java.lang.String getRootDir() {
-      java.lang.Object ref = rootDir_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          rootDir_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string root_dir = 3;</code>
-     */
-    public com.google.protobuf.ByteString
-        getRootDirBytes() {
-      java.lang.Object ref = rootDir_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        rootDir_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // repeated .hbase.pb.TableName table_list = 4;
-    public static final int TABLE_LIST_FIELD_NUMBER = 4;
-    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableList_;
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableListList() {
-      return tableList_;
-    }
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
-        getTableListOrBuilderList() {
-      return tableList_;
-    }
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    public int getTableListCount() {
-      return tableList_.size();
-    }
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableList(int index) {
-      return tableList_.get(index);
-    }
-    /**
-     * <code>repeated .hbase.pb.TableName table_list = 4;</code>
-     */
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableListOrBuilder(
-        int index) {
-      return tableList_.get(index);
-    }
-
-    // required uint64 start_ts = 5;
-    public static final int START_TS_FIELD_NUMBER = 5;
-    private long startTs_;
-    /**
-     * <code>required uint64 start_ts = 5;</code>
-     */
-    public boolean hasStartTs() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    /**
-     * <code>required uint64 start_ts = 5;</code>
-     */
-    public long getStartTs() {
-      return startTs_;
-    }
-
-    // required uint64 complete_ts = 6;
-    public static final int COMPLETE_TS_FIELD_NUMBER = 6;
-    private long completeTs_;
-    /**
-     * <code>required uint64 complete_ts = 6;</code>
-     */
-    public boolean hasCompleteTs() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    /**
-     * <code>required uint64 complete_ts = 6;</code>
-     */
-    public long getCompleteTs() {
-      return completeTs_;
-    }
-
-    // repeated .hbase.pb.BackupImage ancestors = 7;
-    public static final int ANCESTORS_FIELD_NUMBER = 7;
-    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> ancestors_;
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage> getAncestorsList() {
-      return ancestors_;
-    }
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder> 
-        getAncestorsOrBuilderList() {
-      return ancestors_;
-    }
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    public int getAncestorsCount() {
-      return ancestors_.size();
-    }
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage getAncestors(int index) {
-      return ancestors_.get(index);
-    }
-    /**
-     * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code>
-     */
-    public org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImageOrBuilder getAncestorsOrBuilder(
-        int index) {
-      return ancestors_.get(index);
-    }
-
-    private void initFields() {
-      backupId_ = "";
-      backupType_ = org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType.FULL;
-      rootDir_ = "";
-      tableList_ = java.util.Collections.emptyList();
-      startTs_ = 0L;
-      completeTs_ = 0L;
-      ancestors_ = java.util.Collections.emptyList();
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasBackupId()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasBackupType()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasRootDir()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasStartTs()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasCompleteTs()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      for (int i = 0; i < getTableListCount(); i++) {
-        if (!getTableList(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      for (int i = 0; i < getAncestorsCount(); i++) {
-        if (!getAncestors(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getBackupIdBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeEnum(2, backupType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeBytes(3, getRootDirBytes());
-      }
-      for (int i = 0; i < tableList_.size(); i++) {
-        output.writeMessage(4, tableList_.get(i));
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeUInt64(5, startTs_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeUInt64(6, completeTs_);
-      }
-      for (int i = 0; i < ancestors_.size(); i++) {
-        output.writeMessage(7, ancestors_.get(i));
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getBackupIdBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeEnumSize(2, backupType_.getNumber());
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(3, getRootDirBytes());
-      }
-      for (int i = 0; i < tableList_.size(); i++) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(4, tableList_.get(i));
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeUInt64Size(5, startTs_);
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeUInt64Size(6, completeTs_);
-      }
-      for (int i = 0; i < ancestors_.size(); i++) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(7, ancestors_.get(i));
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage other = (org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage) obj;
-
-      boolean result = true;
-      result = result && (hasBackupId() == other.hasBackupId());
-      if (hasBackupId()) {
-        result = result && getBackupId()
-            .equals(other.getBackupId());
-      }
-      result = result && (hasBackupType() == other.hasBackupType());
-      if (hasBackupType()) {
-        result = result &&
-            (getBackupType() == other.getBackupType());
-      }
-      result = result && (hasRootDir() == other.hasRootDir());
-      if (hasRootDir()) {
-        result = result && getRootDir()
-            .equals(other.getRootDir());
-      }
-      result = result && getTableListList()
-          .equals(other.getTableListList());
-      result = result && (hasStartTs() == other.hasStartTs());
-      if (hasStartTs()) {
-        result = result && (getStartTs()
-            == other.getStartTs());
-      }
-      result = result && (hasCompleteTs() == other.hasCompleteTs());
-      if (hasCompleteTs()) {
-        result = result && (getCompleteTs()
-            == other.getCompleteTs());
-      }
-      result = result && getAncestorsList()
-          .equals(other.getAncestorsList());
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    private int memoizedHashCode = 0;
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasBackupId()) {
-        hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER;
-        hash = (53 * hash) + getBackupId().hashCode();
-      }
-      if (hasBackupType()) {
-        hash = (37 * hash) + BACKUP_TYPE_FIELD_NUMBER;
-        hash = (53 * hash) + hashEnum(getBackupType());
-      }
-      if (hasRootDir()) {
-        hash = (37 * hash) + ROOT_DIR_FIELD_NUMBER;
-        hash = (53 * hash) + getRootDir().hashCode();
-      }
-      if (getTableListCount() > 0) {
-        hash = (37 * hash) + TABLE_LIST_FIELD_NUMBER;
-        hash = (53 * hash) + getTableListList().hashCode();
-      }
-      if (hasStartTs()) {
-        hash = (37 * hash) + START_TS_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getStartTs());
-      }
-      if (hasCompleteTs()) {
-        hash = (37 * hash) + COMPLETE_TS_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getCompleteTs());
-      }
-      if (getAncestorsCount() > 0) {
-        hash = (37 * hash) + ANCESTORS_FIELD_NUMBER;
-        hash = (53 * hash) + getAncestorsList().hashCode();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.protobuf.generated.BackupPro

<TRUNCATED>
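
For readers skimming this generated diff: the new BackupImage message and its Builder shown above follow the standard protobuf 2.5 builder pattern, with required fields backup_id (1), backup_type (2), root_dir (3), start_ts (5) and complete_ts (6), plus the repeated table_list (4) and ancestors (7). The snippet below is a minimal usage sketch only, not part of the commit. It assumes the generated setters for the fields not fully visible in this hunk (setBackupType, setRootDir, setCompleteTs, addTableList) follow the same naming convention as the setBackupId/setStartTs accessors that are visible, and it uses ProtobufUtil.toProtoTableName purely as one illustrative way to obtain an hbase.pb.TableName proto.

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupImage;
    import org.apache.hadoop.hbase.protobuf.generated.BackupProtos.BackupType;

    public class BackupImageRoundTripSketch {
      public static void main(String[] args) throws Exception {
        // Build a BackupImage with every required field set; build() fails fast
        // (UninitializedMessageException) if any required field is missing.
        BackupImage image = BackupImage.newBuilder()
            .setBackupId("backup_001")                     // required string backup_id = 1
            .setBackupType(BackupType.FULL)                // required backup_type = 2 (assumed setter name)
            .setRootDir("hdfs://nn/backup")                // required string root_dir = 3 (assumed setter name)
            .addTableList(ProtobufUtil.toProtoTableName(   // repeated TableName table_list = 4 (assumed helper)
                TableName.valueOf("default", "t1")))
            .setStartTs(1475707813000L)                    // required uint64 start_ts = 5
            .setCompleteTs(1475707999000L)                 // required uint64 complete_ts = 6 (assumed setter name)
            .build();

        // Round-trip through the wire format using the parseFrom(byte[]) overload
        // declared in this file; isInitialized() re-checks the required fields.
        byte[] bytes = image.toByteArray();
        BackupImage parsed = BackupImage.parseFrom(bytes);
        System.out.println(parsed.getBackupId() + " initialized=" + parsed.isInitialized());
      }
    }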