Posted to commits@hbase.apache.org by st...@apache.org on 2013/04/11 05:52:57 UTC

svn commit: r1466761 [3/41] - in /hbase/branches/0.95: ./ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-pr...
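[Editor's sketch, not part of the commit: the hunks below replace the PARSER-based static parseFrom methods of the generated messages with builder-backed equivalents; callers of the generated API use the same entry points either way. A minimal illustrative caller follows, assuming the RegionSpecifier field names (type, value) and the REGION_NAME enum constant from the companion HBaseProtos definitions, which are not shown in this diff, and using a hypothetical region name.]

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

public class GetRegionInfoRequestExample {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Build a request with the generated Builder API (setRegion/setCompactionState
    // appear in the diff below and are unchanged by this commit).
    GetRegionInfoRequest request = GetRegionInfoRequest.newBuilder()
        .setRegion(RegionSpecifier.newBuilder()
            .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)   // assumed from HBaseProtos
            .setValue(ByteString.copyFromUtf8("example-region-name"))   // hypothetical value
            .build())
        .setCompactionState(true)
        .build();

    // Round-trip through the wire format. Both the old PARSER.parseFrom(...) and the
    // builder-based newBuilder().mergeFrom(...).buildParsed() implementations shown in
    // the diff accept the same bytes, so caller code is identical in either version.
    byte[] wire = request.toByteArray();
    GetRegionInfoRequest parsed = GetRegionInfoRequest.parseFrom(wire);
    System.out.println(parsed.getCompactionState());
  }
}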

Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java?rev=1466761&r1=1466760&r2=1466761&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java Thu Apr 11 03:52:56 2013
@@ -10,178 +10,68 @@ public final class AdminProtos {
   }
   public interface GetRegionInfoRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+    
     // required .RegionSpecifier region = 1;
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     boolean hasRegion();
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
-
+    
     // optional bool compactionState = 2;
-    /**
-     * <code>optional bool compactionState = 2;</code>
-     */
     boolean hasCompactionState();
-    /**
-     * <code>optional bool compactionState = 2;</code>
-     */
     boolean getCompactionState();
   }
-  /**
-   * Protobuf type {@code GetRegionInfoRequest}
-   */
   public static final class GetRegionInfoRequest extends
       com.google.protobuf.GeneratedMessage
       implements GetRegionInfoRequestOrBuilder {
     // Use GetRegionInfoRequest.newBuilder() to construct.
-    private GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private GetRegionInfoRequest(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private GetRegionInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+    private GetRegionInfoRequest(boolean noInit) {}
+    
     private static final GetRegionInfoRequest defaultInstance;
     public static GetRegionInfoRequest getDefaultInstance() {
       return defaultInstance;
     }
-
+    
     public GetRegionInfoRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private GetRegionInfoRequest(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000001) == 0x00000001)) {
-                subBuilder = region_.toBuilder();
-              }
-              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
-              if (subBuilder != null) {
-                subBuilder.mergeFrom(region_);
-                region_ = subBuilder.buildPartial();
-              }
-              bitField0_ |= 0x00000001;
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              compactionState_ = input.readBool();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
+    
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor;
     }
-
+    
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<GetRegionInfoRequest> PARSER =
-        new com.google.protobuf.AbstractParser<GetRegionInfoRequest>() {
-      public GetRegionInfoRequest parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new GetRegionInfoRequest(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<GetRegionInfoRequest> getParserForType() {
-      return PARSER;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable;
     }
-
+    
     private int bitField0_;
     // required .RegionSpecifier region = 1;
     public static final int REGION_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public boolean hasRegion() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
       return region_;
     }
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
       return region_;
     }
-
+    
     // optional bool compactionState = 2;
     public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
     private boolean compactionState_;
-    /**
-     * <code>optional bool compactionState = 2;</code>
-     */
     public boolean hasCompactionState() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
-    /**
-     * <code>optional bool compactionState = 2;</code>
-     */
     public boolean getCompactionState() {
       return compactionState_;
     }
-
+    
     private void initFields() {
       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
       compactionState_ = false;
@@ -190,7 +80,7 @@ public final class AdminProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+      
       if (!hasRegion()) {
         memoizedIsInitialized = 0;
         return false;
@@ -202,7 +92,7 @@ public final class AdminProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-
+    
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -214,12 +104,12 @@ public final class AdminProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+    
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+    
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -233,14 +123,14 @@ public final class AdminProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+    
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+    
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -250,7 +140,7 @@ public final class AdminProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj;
-
+      
       boolean result = true;
       result = result && (hasRegion() == other.hasRegion());
       if (hasRegion()) {
@@ -266,13 +156,9 @@ public final class AdminProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-
-    private int memoizedHashCode = 0;
+    
     @java.lang.Override
     public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRegion()) {
@@ -284,79 +170,89 @@ public final class AdminProtos {
         hash = (53 * hash) + hashBoolean(getCompactionState());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
       return hash;
     }
-
+    
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
-
+    
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-
+    
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
-    /**
-     * Protobuf type {@code GetRegionInfoRequest}
-     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder {
@@ -364,21 +260,18 @@ public final class AdminProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor;
       }
-
+      
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class);
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable;
       }
-
+      
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      
+      private Builder(BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -390,7 +283,7 @@ public final class AdminProtos {
       private static Builder create() {
         return new Builder();
       }
-
+      
       public Builder clear() {
         super.clear();
         if (regionBuilder_ == null) {
@@ -403,20 +296,20 @@ public final class AdminProtos {
         bitField0_ = (bitField0_ & ~0x00000002);
         return this;
       }
-
+      
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-
+      
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDescriptor();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial();
         if (!result.isInitialized()) {
@@ -424,7 +317,17 @@ public final class AdminProtos {
         }
         return result;
       }
-
+      
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this);
         int from_bitField0_ = bitField0_;
@@ -445,7 +348,7 @@ public final class AdminProtos {
         onBuilt();
         return result;
       }
-
+      
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other);
@@ -454,7 +357,7 @@ public final class AdminProtos {
           return this;
         }
       }
-
+      
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this;
         if (other.hasRegion()) {
@@ -466,7 +369,7 @@ public final class AdminProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-
+      
       public final boolean isInitialized() {
         if (!hasRegion()) {
           
@@ -478,39 +381,57 @@ public final class AdminProtos {
         }
         return true;
       }
-
+      
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+              if (hasRegion()) {
+                subBuilder.mergeFrom(getRegion());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setRegion(subBuilder.buildPartial());
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              compactionState_ = input.readBool();
+              break;
+            }
           }
         }
-        return this;
       }
+      
       private int bitField0_;
-
+      
       // required .RegionSpecifier region = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public boolean hasRegion() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
         if (regionBuilder_ == null) {
           return region_;
@@ -518,9 +439,6 @@ public final class AdminProtos {
           return regionBuilder_.getMessage();
         }
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
         if (regionBuilder_ == null) {
           if (value == null) {
@@ -534,9 +452,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder setRegion(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
         if (regionBuilder_ == null) {
@@ -548,9 +463,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
         if (regionBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -567,9 +479,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder clearRegion() {
         if (regionBuilder_ == null) {
           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
@@ -580,17 +489,11 @@ public final class AdminProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getRegionFieldBuilder().getBuilder();
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
         if (regionBuilder_ != null) {
           return regionBuilder_.getMessageOrBuilder();
@@ -598,9 +501,6 @@ public final class AdminProtos {
           return region_;
         }
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
           getRegionFieldBuilder() {
@@ -614,235 +514,95 @@ public final class AdminProtos {
         }
         return regionBuilder_;
       }
-
+      
       // optional bool compactionState = 2;
       private boolean compactionState_ ;
-      /**
-       * <code>optional bool compactionState = 2;</code>
-       */
       public boolean hasCompactionState() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
-      /**
-       * <code>optional bool compactionState = 2;</code>
-       */
       public boolean getCompactionState() {
         return compactionState_;
       }
-      /**
-       * <code>optional bool compactionState = 2;</code>
-       */
       public Builder setCompactionState(boolean value) {
         bitField0_ |= 0x00000002;
         compactionState_ = value;
         onChanged();
         return this;
       }
-      /**
-       * <code>optional bool compactionState = 2;</code>
-       */
       public Builder clearCompactionState() {
         bitField0_ = (bitField0_ & ~0x00000002);
         compactionState_ = false;
         onChanged();
         return this;
       }
-
+      
       // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest)
     }
-
+    
     static {
       defaultInstance = new GetRegionInfoRequest(true);
       defaultInstance.initFields();
     }
-
+    
     // @@protoc_insertion_point(class_scope:GetRegionInfoRequest)
   }
-
+  
   public interface GetRegionInfoResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+    
     // required .RegionInfo regionInfo = 1;
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     boolean hasRegionInfo();
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();
-
+    
     // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
-    /**
-     * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-     */
     boolean hasCompactionState();
-    /**
-     * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState();
   }
-  /**
-   * Protobuf type {@code GetRegionInfoResponse}
-   */
   public static final class GetRegionInfoResponse extends
       com.google.protobuf.GeneratedMessage
       implements GetRegionInfoResponseOrBuilder {
     // Use GetRegionInfoResponse.newBuilder() to construct.
-    private GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private GetRegionInfoResponse(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private GetRegionInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+    private GetRegionInfoResponse(boolean noInit) {}
+    
     private static final GetRegionInfoResponse defaultInstance;
     public static GetRegionInfoResponse getDefaultInstance() {
       return defaultInstance;
     }
-
+    
     public GetRegionInfoResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private GetRegionInfoResponse(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000001) == 0x00000001)) {
-                subBuilder = regionInfo_.toBuilder();
-              }
-              regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry);
-              if (subBuilder != null) {
-                subBuilder.mergeFrom(regionInfo_);
-                regionInfo_ = subBuilder.buildPartial();
-              }
-              bitField0_ |= 0x00000001;
-              break;
-            }
-            case 16: {
-              int rawValue = input.readEnum();
-              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue);
-              if (value == null) {
-                unknownFields.mergeVarintField(2, rawValue);
-              } else {
-                bitField0_ |= 0x00000002;
-                compactionState_ = value;
-              }
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
+    
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor;
     }
-
+    
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<GetRegionInfoResponse> PARSER =
-        new com.google.protobuf.AbstractParser<GetRegionInfoResponse>() {
-      public GetRegionInfoResponse parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new GetRegionInfoResponse(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<GetRegionInfoResponse> getParserForType() {
-      return PARSER;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable;
     }
-
-    /**
-     * Protobuf enum {@code GetRegionInfoResponse.CompactionState}
-     */
+    
     public enum CompactionState
         implements com.google.protobuf.ProtocolMessageEnum {
-      /**
-       * <code>NONE = 0;</code>
-       */
       NONE(0, 0),
-      /**
-       * <code>MINOR = 1;</code>
-       */
       MINOR(1, 1),
-      /**
-       * <code>MAJOR = 2;</code>
-       */
       MAJOR(2, 2),
-      /**
-       * <code>MAJOR_AND_MINOR = 3;</code>
-       */
       MAJOR_AND_MINOR(3, 3),
       ;
-
-      /**
-       * <code>NONE = 0;</code>
-       */
+      
       public static final int NONE_VALUE = 0;
-      /**
-       * <code>MINOR = 1;</code>
-       */
       public static final int MINOR_VALUE = 1;
-      /**
-       * <code>MAJOR = 2;</code>
-       */
       public static final int MAJOR_VALUE = 2;
-      /**
-       * <code>MAJOR_AND_MINOR = 3;</code>
-       */
       public static final int MAJOR_AND_MINOR_VALUE = 3;
-
-
+      
+      
       public final int getNumber() { return value; }
-
+      
       public static CompactionState valueOf(int value) {
         switch (value) {
           case 0: return NONE;
@@ -852,7 +612,7 @@ public final class AdminProtos {
           default: return null;
         }
       }
-
+      
       public static com.google.protobuf.Internal.EnumLiteMap<CompactionState>
           internalGetValueMap() {
         return internalValueMap;
@@ -864,7 +624,7 @@ public final class AdminProtos {
                 return CompactionState.valueOf(number);
               }
             };
-
+      
       public final com.google.protobuf.Descriptors.EnumValueDescriptor
           getValueDescriptor() {
         return getDescriptor().getValues().get(index);
@@ -877,9 +637,11 @@ public final class AdminProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0);
       }
-
-      private static final CompactionState[] VALUES = values();
-
+      
+      private static final CompactionState[] VALUES = {
+        NONE, MINOR, MAJOR, MAJOR_AND_MINOR, 
+      };
+      
       public static CompactionState valueOf(
           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
         if (desc.getType() != getDescriptor()) {
@@ -888,57 +650,42 @@ public final class AdminProtos {
         }
         return VALUES[desc.getIndex()];
       }
-
+      
       private final int index;
       private final int value;
-
+      
       private CompactionState(int index, int value) {
         this.index = index;
         this.value = value;
       }
-
+      
       // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState)
     }
-
+    
     private int bitField0_;
     // required .RegionInfo regionInfo = 1;
     public static final int REGIONINFO_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     public boolean hasRegionInfo() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
       return regionInfo_;
     }
-    /**
-     * <code>required .RegionInfo regionInfo = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
       return regionInfo_;
     }
-
+    
     // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
     public static final int COMPACTIONSTATE_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_;
-    /**
-     * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-     */
     public boolean hasCompactionState() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
-    /**
-     * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
       return compactionState_;
     }
-
+    
     private void initFields() {
       regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
       compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
@@ -947,7 +694,7 @@ public final class AdminProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+      
       if (!hasRegionInfo()) {
         memoizedIsInitialized = 0;
         return false;
@@ -959,7 +706,7 @@ public final class AdminProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-
+    
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -971,12 +718,12 @@ public final class AdminProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+    
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+    
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -990,14 +737,14 @@ public final class AdminProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+    
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+    
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -1007,7 +754,7 @@ public final class AdminProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj;
-
+      
       boolean result = true;
       result = result && (hasRegionInfo() == other.hasRegionInfo());
       if (hasRegionInfo()) {
@@ -1023,13 +770,9 @@ public final class AdminProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-
-    private int memoizedHashCode = 0;
+    
     @java.lang.Override
     public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRegionInfo()) {
@@ -1041,79 +784,89 @@ public final class AdminProtos {
         hash = (53 * hash) + hashEnum(getCompactionState());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
       return hash;
     }
-
+    
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
-
+    
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-
+    
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
-    /**
-     * Protobuf type {@code GetRegionInfoResponse}
-     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder {
@@ -1121,21 +874,18 @@ public final class AdminProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor;
       }
-
+      
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class);
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable;
       }
-
+      
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      
+      private Builder(BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1147,7 +897,7 @@ public final class AdminProtos {
       private static Builder create() {
         return new Builder();
       }
-
+      
       public Builder clear() {
         super.clear();
         if (regionInfoBuilder_ == null) {
@@ -1160,20 +910,20 @@ public final class AdminProtos {
         bitField0_ = (bitField0_ & ~0x00000002);
         return this;
       }
-
+      
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-
+      
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial();
         if (!result.isInitialized()) {
@@ -1181,7 +931,17 @@ public final class AdminProtos {
         }
         return result;
       }
-
+      
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this);
         int from_bitField0_ = bitField0_;
@@ -1202,7 +962,7 @@ public final class AdminProtos {
         onBuilt();
         return result;
       }
-
+      
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other);
@@ -1211,7 +971,7 @@ public final class AdminProtos {
           return this;
         }
       }
-
+      
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this;
         if (other.hasRegionInfo()) {
@@ -1223,7 +983,7 @@ public final class AdminProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-
+      
       public final boolean isInitialized() {
         if (!hasRegionInfo()) {
           
@@ -1235,39 +995,63 @@ public final class AdminProtos {
         }
         return true;
       }
-
+      
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
+              if (hasRegionInfo()) {
+                subBuilder.mergeFrom(getRegionInfo());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setRegionInfo(subBuilder.buildPartial());
+              break;
+            }
+            case 16: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(2, rawValue);
+              } else {
+                bitField0_ |= 0x00000002;
+                compactionState_ = value;
+              }
+              break;
+            }
           }
         }
-        return this;
       }
+      
       private int bitField0_;
-
+      
       // required .RegionInfo regionInfo = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public boolean hasRegionInfo() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
         if (regionInfoBuilder_ == null) {
           return regionInfo_;
@@ -1275,9 +1059,6 @@ public final class AdminProtos {
           return regionInfoBuilder_.getMessage();
         }
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
           if (value == null) {
@@ -1291,9 +1072,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public Builder setRegionInfo(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
         if (regionInfoBuilder_ == null) {
@@ -1305,9 +1083,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
         if (regionInfoBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -1324,9 +1099,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public Builder clearRegionInfo() {
         if (regionInfoBuilder_ == null) {
           regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
@@ -1337,17 +1109,11 @@ public final class AdminProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getRegionInfoFieldBuilder().getBuilder();
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
         if (regionInfoBuilder_ != null) {
           return regionInfoBuilder_.getMessageOrBuilder();
@@ -1355,9 +1121,6 @@ public final class AdminProtos {
           return regionInfo_;
         }
       }
-      /**
-       * <code>required .RegionInfo regionInfo = 1;</code>
-       */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
           getRegionInfoFieldBuilder() {
@@ -1371,24 +1134,15 @@ public final class AdminProtos {
         }
         return regionInfoBuilder_;
       }
-
+      
       // optional .GetRegionInfoResponse.CompactionState compactionState = 2;
       private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
-      /**
-       * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-       */
       public boolean hasCompactionState() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
-      /**
-       * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() {
         return compactionState_;
       }
-      /**
-       * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-       */
       public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) {
         if (value == null) {
           throw new NullPointerException();
@@ -1398,233 +1152,102 @@ public final class AdminProtos {
         onChanged();
         return this;
       }
-      /**
-       * <code>optional .GetRegionInfoResponse.CompactionState compactionState = 2;</code>
-       */
       public Builder clearCompactionState() {
         bitField0_ = (bitField0_ & ~0x00000002);
         compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE;
         onChanged();
         return this;
       }
-
+      
       // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse)
     }
-
+    
     static {
       defaultInstance = new GetRegionInfoResponse(true);
       defaultInstance.initFields();
     }
-
+    
     // @@protoc_insertion_point(class_scope:GetRegionInfoResponse)
   }
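
For orientation while reading the hunks above (this snippet is illustrative only and is not part of the committed change): the GetRegionInfoResponse builder keeps the same methods on both sides of the diff; only the per-field javadoc and the parse internals differ. A minimal sketch of that builder API follows, using the field and enum names visible in the hunks (regionInfo, compactionState, CompactionState.NONE). The required regionInfo field is deliberately left unset for brevity, so the sketch calls buildPartial() instead of build(), which would otherwise fail the initialization check.

// Illustrative sketch, not part of r1466761. Exercises the
// GetRegionInfoResponse.Builder methods shown in the diff above.
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;

public class GetRegionInfoResponseSketch {
  public static void main(String[] args) {
    GetRegionInfoResponse.Builder builder = GetRegionInfoResponse.newBuilder()
        .setCompactionState(CompactionState.NONE);   // optional field, enum shown in the diff
    // regionInfo (required) is left unset here, so build() would throw an
    // UninitializedMessageException; buildPartial() skips that check.
    GetRegionInfoResponse partial = builder.buildPartial();
    System.out.println(partial.hasCompactionState()); // true
    System.out.println(partial.hasRegionInfo());      // false
  }
}
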
-
+  
   public interface GetStoreFileRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+    
     // required .RegionSpecifier region = 1;
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     boolean hasRegion();
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
-
+    
     // repeated bytes family = 2;
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     java.util.List<com.google.protobuf.ByteString> getFamilyList();
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     int getFamilyCount();
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     com.google.protobuf.ByteString getFamily(int index);
   }
-  /**
-   * Protobuf type {@code GetStoreFileRequest}
-   *
-   * <pre>
-   **
-   * Get a list of store files for a set of column families in a particular region.
-   * If no column family is specified, get the store files for all column families.
-   * </pre>
-   */
   public static final class GetStoreFileRequest extends
       com.google.protobuf.GeneratedMessage
       implements GetStoreFileRequestOrBuilder {
     // Use GetStoreFileRequest.newBuilder() to construct.
-    private GetStoreFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private GetStoreFileRequest(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private GetStoreFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+    private GetStoreFileRequest(boolean noInit) {}
+    
     private static final GetStoreFileRequest defaultInstance;
     public static GetStoreFileRequest getDefaultInstance() {
       return defaultInstance;
     }
-
+    
     public GetStoreFileRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private GetStoreFileRequest(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000001) == 0x00000001)) {
-                subBuilder = region_.toBuilder();
-              }
-              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
-              if (subBuilder != null) {
-                subBuilder.mergeFrom(region_);
-                region_ = subBuilder.buildPartial();
-              }
-              bitField0_ |= 0x00000001;
-              break;
-            }
-            case 18: {
-              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
-                family_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
-                mutable_bitField0_ |= 0x00000002;
-              }
-              family_.add(input.readBytes());
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
-          family_ = java.util.Collections.unmodifiableList(family_);
-        }
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
+    
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
     }
-
+    
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<GetStoreFileRequest> PARSER =
-        new com.google.protobuf.AbstractParser<GetStoreFileRequest>() {
-      public GetStoreFileRequest parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new GetStoreFileRequest(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<GetStoreFileRequest> getParserForType() {
-      return PARSER;
+      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable;
     }
-
+    
     private int bitField0_;
     // required .RegionSpecifier region = 1;
     public static final int REGION_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public boolean hasRegion() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
       return region_;
     }
-    /**
-     * <code>required .RegionSpecifier region = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
       return region_;
     }
-
+    
     // repeated bytes family = 2;
     public static final int FAMILY_FIELD_NUMBER = 2;
     private java.util.List<com.google.protobuf.ByteString> family_;
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     public java.util.List<com.google.protobuf.ByteString>
         getFamilyList() {
       return family_;
     }
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     public int getFamilyCount() {
       return family_.size();
     }
-    /**
-     * <code>repeated bytes family = 2;</code>
-     */
     public com.google.protobuf.ByteString getFamily(int index) {
       return family_.get(index);
     }
-
+    
     private void initFields() {
       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
-      family_ = java.util.Collections.emptyList();
+      family_ = java.util.Collections.emptyList();;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+      
       if (!hasRegion()) {
         memoizedIsInitialized = 0;
         return false;
@@ -1636,7 +1259,7 @@ public final class AdminProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-
+    
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -1648,12 +1271,12 @@ public final class AdminProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+    
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+    
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -1672,14 +1295,14 @@ public final class AdminProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+    
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+    
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -1689,7 +1312,7 @@ public final class AdminProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj;
-
+      
       boolean result = true;
       result = result && (hasRegion() == other.hasRegion());
       if (hasRegion()) {
@@ -1702,13 +1325,9 @@ public final class AdminProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-
-    private int memoizedHashCode = 0;
+    
     @java.lang.Override
     public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasRegion()) {
@@ -1720,85 +1339,89 @@ public final class AdminProtos {
         hash = (53 * hash) + getFamilyList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
       return hash;
     }
-
+    
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
+      return newBuilder().mergeFrom(data).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return newBuilder().mergeFrom(input).buildParsed();
     }
     public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
     }
-
+    
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-
+    
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
-    /**
-     * Protobuf type {@code GetStoreFileRequest}
-     *
-     * <pre>
-     **
-     * Get a list of store files for a set of column families in a particular region.
-     * If no column family is specified, get the store files for all column families.
-     * </pre>
-     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder {
@@ -1806,21 +1429,18 @@ public final class AdminProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
       }
-
+      
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable;
       }
-
+      
       // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      
+      private Builder(BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1832,7 +1452,7 @@ public final class AdminProtos {
       private static Builder create() {
         return new Builder();
       }
-
+      
       public Builder clear() {
         super.clear();
         if (regionBuilder_ == null) {
@@ -1841,24 +1461,24 @@ public final class AdminProtos {
           regionBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000001);
-        family_ = java.util.Collections.emptyList();
+        family_ = java.util.Collections.emptyList();;
         bitField0_ = (bitField0_ & ~0x00000002);
         return this;
       }
-
+      
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-
+      
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDescriptor();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
       }
-
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
         if (!result.isInitialized()) {
@@ -1866,7 +1486,17 @@ public final class AdminProtos {
         }
         return result;
       }
-
+      
+      private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+      
       public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this);
         int from_bitField0_ = bitField0_;
@@ -1888,7 +1518,7 @@ public final class AdminProtos {
         onBuilt();
         return result;
       }
-
+      
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other);
@@ -1897,7 +1527,7 @@ public final class AdminProtos {
           return this;
         }
       }
-
+      
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this;
         if (other.hasRegion()) {
@@ -1916,7 +1546,7 @@ public final class AdminProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-
+      
       public final boolean isInitialized() {
         if (!hasRegion()) {
           
@@ -1928,39 +1558,57 @@ public final class AdminProtos {
         }
         return true;
       }
-
+      
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
+              if (hasRegion()) {
+                subBuilder.mergeFrom(getRegion());
+              }
+              input.readMessage(subBuilder, extensionRegistry);
+              setRegion(subBuilder.buildPartial());
+              break;
+            }
+            case 18: {
+              ensureFamilyIsMutable();
+              family_.add(input.readBytes());
+              break;
+            }
           }
         }
-        return this;
       }
+      
       private int bitField0_;
-
+      
       // required .RegionSpecifier region = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public boolean hasRegion() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
         if (regionBuilder_ == null) {
           return region_;
@@ -1968,9 +1616,6 @@ public final class AdminProtos {
           return regionBuilder_.getMessage();
         }
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
         if (regionBuilder_ == null) {
           if (value == null) {
@@ -1984,9 +1629,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder setRegion(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
         if (regionBuilder_ == null) {
@@ -1998,9 +1640,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
         if (regionBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
@@ -2017,9 +1656,6 @@ public final class AdminProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public Builder clearRegion() {
         if (regionBuilder_ == null) {
           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
@@ -2030,17 +1666,11 @@ public final class AdminProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getRegionFieldBuilder().getBuilder();
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
         if (regionBuilder_ != null) {
           return regionBuilder_.getMessageOrBuilder();
@@ -2048,9 +1678,6 @@ public final class AdminProtos {
           return region_;
         }
       }
-      /**
-       * <code>required .RegionSpecifier region = 1;</code>
-       */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
           getRegionFieldBuilder() {
@@ -2064,37 +1691,25 @@ public final class AdminProtos {
         }
         return regionBuilder_;
       }
-
+      
       // repeated bytes family = 2;
-      private java.util.List<com.google.protobuf.ByteString> family_ = java.util.Collections.emptyList();
+      private java.util.List<com.google.protobuf.ByteString> family_ = java.util.Collections.emptyList();;
       private void ensureFamilyIsMutable() {
         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
           family_ = new java.util.ArrayList<com.google.protobuf.ByteString>(family_);
           bitField0_ |= 0x00000002;
          }
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public java.util.List<com.google.protobuf.ByteString>
           getFamilyList() {
         return java.util.Collections.unmodifiableList(family_);
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public int getFamilyCount() {
         return family_.size();
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public com.google.protobuf.ByteString getFamily(int index) {
         return family_.get(index);
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public Builder setFamily(
           int index, com.google.protobuf.ByteString value) {
         if (value == null) {
@@ -2105,9 +1720,6 @@ public final class AdminProtos {
         onChanged();
         return this;
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public Builder addFamily(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
@@ -2117,9 +1729,6 @@ public final class AdminProtos {
         onChanged();
         return this;
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public Builder addAllFamily(
           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
         ensureFamilyIsMutable();
@@ -2127,181 +1736,74 @@ public final class AdminProtos {
         onChanged();
         return this;
       }
-      /**
-       * <code>repeated bytes family = 2;</code>
-       */
       public Builder clearFamily() {
-        family_ = java.util.Collections.emptyList();
+        family_ = java.util.Collections.emptyList();;
         bitField0_ = (bitField0_ & ~0x00000002);
         onChanged();
         return this;
       }
-
+      
       // @@protoc_insertion_point(builder_scope:GetStoreFileRequest)
     }
-
+    
     static {
       defaultInstance = new GetStoreFileRequest(true);
       defaultInstance.initFields();
     }
-
+    
     // @@protoc_insertion_point(class_scope:GetStoreFileRequest)
   }
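
For orientation (again illustrative only, not part of the committed change): the GetStoreFileRequest hunks above swap the static-PARSER parse internals for the builder-based newBuilder().mergeFrom(...).buildParsed() path and drop the per-field javadoc, but the public static parseFrom(...) entry points keep the same signatures, so caller code round-trips messages the same way with either variant. A minimal round-trip sketch follows; the RegionSpecifier type/value setters and the ENCODED_REGION_NAME enum value are assumed from HBaseProtos and do not appear in the hunks shown here.

// Illustrative sketch, not part of r1466761. Builds a GetStoreFileRequest,
// serializes it, and parses it back through the unchanged public API.
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;

public class GetStoreFileRequestRoundTrip {
  public static void main(String[] args) throws Exception {
    // RegionSpecifier's type/value fields are assumed from HBaseProtos;
    // the value below is just a placeholder encoded region name.
    RegionSpecifier region = RegionSpecifier.newBuilder()
        .setType(RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
        .setValue(ByteString.copyFromUtf8("example-encoded-region-name"))
        .build();
    GetStoreFileRequest request = GetStoreFileRequest.newBuilder()
        .setRegion(region)                              // required .RegionSpecifier region = 1
        .addFamily(ByteString.copyFromUtf8("info"))     // repeated bytes family = 2
        .build();
    byte[] wire = request.toByteArray();
    // parseFrom(byte[]) has the same signature before and after this change.
    GetStoreFileRequest parsed = GetStoreFileRequest.parseFrom(wire);
    System.out.println(parsed.getFamilyCount());        // prints 1
  }
}
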
-
+  
   public interface GetStoreFileResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-
+    
     // repeated string storeFile = 1;
-    /**
-     * <code>repeated string storeFile = 1;</code>
-     */
-    java.util.List<java.lang.String>
-    getStoreFileList();
-    /**
-     * <code>repeated string storeFile = 1;</code>
-     */
+    java.util.List<String> getStoreFileList();
     int getStoreFileCount();
-    /**
-     * <code>repeated string storeFile = 1;</code>
-     */
-    java.lang.String getStoreFile(int index);
-    /**
-     * <code>repeated string storeFile = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getStoreFileBytes(int index);
+    String getStoreFile(int index);
   }
-  /**
-   * Protobuf type {@code GetStoreFileResponse}
-   */
   public static final class GetStoreFileResponse extends
       com.google.protobuf.GeneratedMessage
       implements GetStoreFileResponseOrBuilder {
     // Use GetStoreFileResponse.newBuilder() to construct.
-    private GetStoreFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private GetStoreFileResponse(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private GetStoreFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
+    private GetStoreFileResponse(boolean noInit) {}
+    
     private static final GetStoreFileResponse defaultInstance;
     public static GetStoreFileResponse getDefaultInstance() {
       return defaultInstance;
     }
-
+    
     public GetStoreFileResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private GetStoreFileResponse(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();

[... 20018 lines stripped ...]