Posted to commits@hbase.apache.org by st...@apache.org on 2013/04/10 18:43:20 UTC

svn commit: r1466557 [22/41] - in /hbase/branches/0.95: ./ hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-p...
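
The regenerated classes in this hunk follow the newer protoc output style (apparently protobuf 2.5): each message gains a public PARSER backed by a parse-from-CodedInputStream constructor, the static parseFrom() helpers delegate to that PARSER instead of newBuilder().mergeFrom(...).buildParsed(), field accessor tables call ensureFieldAccessorsInitialized(), and hashCode() is memoized. The caller-facing API is unchanged. Below is a minimal, hypothetical sketch of that calling surface, using only the accessors visible in this diff plus the standard protobuf toByteArray()/parseFrom(byte[]) pair; the class name, port, and timestamps are illustrative values only.

    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;

    public class StartupRequestRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a request with the three required fields declared in the .proto.
        RegionServerStartupRequest request = RegionServerStartupRequest.newBuilder()
            .setPort(60020)                                    // example port only
            .setServerStartCode(System.currentTimeMillis())    // this server's startcode
            .setServerCurrentTime(System.currentTimeMillis())  // current time of the region server in ms
            .build();

        byte[] wire = request.toByteArray();

        // Before this commit parseFrom() ran newBuilder().mergeFrom(wire).buildParsed();
        // after it, the same call delegates to the generated PARSER. Callers see no difference.
        RegionServerStartupRequest parsed = RegionServerStartupRequest.parseFrom(wire);
        System.out.println(parsed.getPort() + " started at " + parsed.getServerStartCode());
      }
    }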

Modified: hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java?rev=1466557&r1=1466556&r2=1466557&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java (original)
+++ hbase/branches/0.95/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java Wed Apr 10 16:43:18 2013
@@ -10,78 +10,239 @@ public final class RegionServerStatusPro
   }
   public interface RegionServerStartupRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // required uint32 port = 1;
+    /**
+     * <code>required uint32 port = 1;</code>
+     *
+     * <pre>
+     ** Port number this regionserver is up on 
+     * </pre>
+     */
     boolean hasPort();
+    /**
+     * <code>required uint32 port = 1;</code>
+     *
+     * <pre>
+     ** Port number this regionserver is up on 
+     * </pre>
+     */
     int getPort();
-    
+
     // required uint64 serverStartCode = 2;
+    /**
+     * <code>required uint64 serverStartCode = 2;</code>
+     *
+     * <pre>
+     ** This servers' startcode 
+     * </pre>
+     */
     boolean hasServerStartCode();
+    /**
+     * <code>required uint64 serverStartCode = 2;</code>
+     *
+     * <pre>
+     ** This servers' startcode 
+     * </pre>
+     */
     long getServerStartCode();
-    
+
     // required uint64 serverCurrentTime = 3;
+    /**
+     * <code>required uint64 serverCurrentTime = 3;</code>
+     *
+     * <pre>
+     ** Current time of the region server in ms 
+     * </pre>
+     */
     boolean hasServerCurrentTime();
+    /**
+     * <code>required uint64 serverCurrentTime = 3;</code>
+     *
+     * <pre>
+     ** Current time of the region server in ms 
+     * </pre>
+     */
     long getServerCurrentTime();
   }
+  /**
+   * Protobuf type {@code RegionServerStartupRequest}
+   */
   public static final class RegionServerStartupRequest extends
       com.google.protobuf.GeneratedMessage
       implements RegionServerStartupRequestOrBuilder {
     // Use RegionServerStartupRequest.newBuilder() to construct.
-    private RegionServerStartupRequest(Builder builder) {
+    private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private RegionServerStartupRequest(boolean noInit) {}
-    
+    private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final RegionServerStartupRequest defaultInstance;
     public static RegionServerStartupRequest getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public RegionServerStartupRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RegionServerStartupRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              port_ = input.readUInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              serverStartCode_ = input.readUInt64();
+              break;
+            }
+            case 24: {
+              bitField0_ |= 0x00000004;
+              serverCurrentTime_ = input.readUInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RegionServerStartupRequest> PARSER =
+        new com.google.protobuf.AbstractParser<RegionServerStartupRequest>() {
+      public RegionServerStartupRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionServerStartupRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionServerStartupRequest> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // required uint32 port = 1;
     public static final int PORT_FIELD_NUMBER = 1;
     private int port_;
+    /**
+     * <code>required uint32 port = 1;</code>
+     *
+     * <pre>
+     ** Port number this regionserver is up on 
+     * </pre>
+     */
     public boolean hasPort() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>required uint32 port = 1;</code>
+     *
+     * <pre>
+     ** Port number this regionserver is up on 
+     * </pre>
+     */
     public int getPort() {
       return port_;
     }
-    
+
     // required uint64 serverStartCode = 2;
     public static final int SERVERSTARTCODE_FIELD_NUMBER = 2;
     private long serverStartCode_;
+    /**
+     * <code>required uint64 serverStartCode = 2;</code>
+     *
+     * <pre>
+     ** This servers' startcode 
+     * </pre>
+     */
     public boolean hasServerStartCode() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>required uint64 serverStartCode = 2;</code>
+     *
+     * <pre>
+     ** This servers' startcode 
+     * </pre>
+     */
     public long getServerStartCode() {
       return serverStartCode_;
     }
-    
+
     // required uint64 serverCurrentTime = 3;
     public static final int SERVERCURRENTTIME_FIELD_NUMBER = 3;
     private long serverCurrentTime_;
+    /**
+     * <code>required uint64 serverCurrentTime = 3;</code>
+     *
+     * <pre>
+     ** Current time of the region server in ms 
+     * </pre>
+     */
     public boolean hasServerCurrentTime() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
     }
+    /**
+     * <code>required uint64 serverCurrentTime = 3;</code>
+     *
+     * <pre>
+     ** Current time of the region server in ms 
+     * </pre>
+     */
     public long getServerCurrentTime() {
       return serverCurrentTime_;
     }
-    
+
     private void initFields() {
       port_ = 0;
       serverStartCode_ = 0L;
@@ -91,7 +252,7 @@ public final class RegionServerStatusPro
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       if (!hasPort()) {
         memoizedIsInitialized = 0;
         return false;
@@ -107,7 +268,7 @@ public final class RegionServerStatusPro
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -122,12 +283,12 @@ public final class RegionServerStatusPro
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -145,14 +306,14 @@ public final class RegionServerStatusPro
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -162,7 +323,7 @@ public final class RegionServerStatusPro
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj;
-      
+
       boolean result = true;
       result = result && (hasPort() == other.hasPort());
       if (hasPort()) {
@@ -183,9 +344,13 @@ public final class RegionServerStatusPro
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasPort()) {
@@ -201,89 +366,79 @@ public final class RegionServerStatusPro
         hash = (53 * hash) + hashLong(getServerCurrentTime());
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code RegionServerStartupRequest}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder {
@@ -291,18 +446,21 @@ public final class RegionServerStatusPro
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -313,7 +471,7 @@ public final class RegionServerStatusPro
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         port_ = 0;
@@ -324,20 +482,20 @@ public final class RegionServerStatusPro
         bitField0_ = (bitField0_ & ~0x00000004);
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupRequest_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial();
         if (!result.isInitialized()) {
@@ -345,17 +503,7 @@ public final class RegionServerStatusPro
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this);
         int from_bitField0_ = bitField0_;
@@ -376,7 +524,7 @@ public final class RegionServerStatusPro
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other);
@@ -385,7 +533,7 @@ public final class RegionServerStatusPro
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this;
         if (other.hasPort()) {
@@ -400,7 +548,7 @@ public final class RegionServerStatusPro
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         if (!hasPort()) {
           
@@ -416,187 +564,419 @@ public final class RegionServerStatusPro
         }
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              port_ = input.readUInt32();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              serverStartCode_ = input.readUInt64();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              serverCurrentTime_ = input.readUInt64();
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
-      
       private int bitField0_;
-      
+
       // required uint32 port = 1;
       private int port_ ;
+      /**
+       * <code>required uint32 port = 1;</code>
+       *
+       * <pre>
+       ** Port number this regionserver is up on 
+       * </pre>
+       */
       public boolean hasPort() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>required uint32 port = 1;</code>
+       *
+       * <pre>
+       ** Port number this regionserver is up on 
+       * </pre>
+       */
       public int getPort() {
         return port_;
       }
+      /**
+       * <code>required uint32 port = 1;</code>
+       *
+       * <pre>
+       ** Port number this regionserver is up on 
+       * </pre>
+       */
       public Builder setPort(int value) {
         bitField0_ |= 0x00000001;
         port_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>required uint32 port = 1;</code>
+       *
+       * <pre>
+       ** Port number this regionserver is up on 
+       * </pre>
+       */
       public Builder clearPort() {
         bitField0_ = (bitField0_ & ~0x00000001);
         port_ = 0;
         onChanged();
         return this;
       }
-      
+
       // required uint64 serverStartCode = 2;
       private long serverStartCode_ ;
+      /**
+       * <code>required uint64 serverStartCode = 2;</code>
+       *
+       * <pre>
+       ** This servers' startcode 
+       * </pre>
+       */
       public boolean hasServerStartCode() {
         return ((bitField0_ & 0x00000002) == 0x00000002);
       }
+      /**
+       * <code>required uint64 serverStartCode = 2;</code>
+       *
+       * <pre>
+       ** This servers' startcode 
+       * </pre>
+       */
       public long getServerStartCode() {
         return serverStartCode_;
       }
+      /**
+       * <code>required uint64 serverStartCode = 2;</code>
+       *
+       * <pre>
+       ** This servers' startcode 
+       * </pre>
+       */
       public Builder setServerStartCode(long value) {
         bitField0_ |= 0x00000002;
         serverStartCode_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>required uint64 serverStartCode = 2;</code>
+       *
+       * <pre>
+       ** This servers' startcode 
+       * </pre>
+       */
       public Builder clearServerStartCode() {
         bitField0_ = (bitField0_ & ~0x00000002);
         serverStartCode_ = 0L;
         onChanged();
         return this;
       }
-      
+
       // required uint64 serverCurrentTime = 3;
       private long serverCurrentTime_ ;
+      /**
+       * <code>required uint64 serverCurrentTime = 3;</code>
+       *
+       * <pre>
+       ** Current time of the region server in ms 
+       * </pre>
+       */
       public boolean hasServerCurrentTime() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
+      /**
+       * <code>required uint64 serverCurrentTime = 3;</code>
+       *
+       * <pre>
+       ** Current time of the region server in ms 
+       * </pre>
+       */
       public long getServerCurrentTime() {
         return serverCurrentTime_;
       }
+      /**
+       * <code>required uint64 serverCurrentTime = 3;</code>
+       *
+       * <pre>
+       ** Current time of the region server in ms 
+       * </pre>
+       */
       public Builder setServerCurrentTime(long value) {
         bitField0_ |= 0x00000004;
         serverCurrentTime_ = value;
         onChanged();
         return this;
       }
+      /**
+       * <code>required uint64 serverCurrentTime = 3;</code>
+       *
+       * <pre>
+       ** Current time of the region server in ms 
+       * </pre>
+       */
       public Builder clearServerCurrentTime() {
         bitField0_ = (bitField0_ & ~0x00000004);
         serverCurrentTime_ = 0L;
         onChanged();
         return this;
       }
-      
+
       // @@protoc_insertion_point(builder_scope:RegionServerStartupRequest)
     }
-    
+
     static {
       defaultInstance = new RegionServerStartupRequest(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:RegionServerStartupRequest)
   }
-  
+
   public interface RegionServerStartupResponseOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // repeated .NameStringPair mapEntries = 1;
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> 
         getMapEntriesList();
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index);
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     int getMapEntriesCount();
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
         getMapEntriesOrBuilderList();
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
         int index);
   }
+  /**
+   * Protobuf type {@code RegionServerStartupResponse}
+   */
   public static final class RegionServerStartupResponse extends
       com.google.protobuf.GeneratedMessage
       implements RegionServerStartupResponseOrBuilder {
     // Use RegionServerStartupResponse.newBuilder() to construct.
-    private RegionServerStartupResponse(Builder builder) {
+    private RegionServerStartupResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private RegionServerStartupResponse(boolean noInit) {}
-    
+    private RegionServerStartupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final RegionServerStartupResponse defaultInstance;
     public static RegionServerStartupResponse getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public RegionServerStartupResponse getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RegionServerStartupResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                mapEntries_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              mapEntries_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RegionServerStartupResponse> PARSER =
+        new com.google.protobuf.AbstractParser<RegionServerStartupResponse>() {
+      public RegionServerStartupResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionServerStartupResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionServerStartupResponse> getParserForType() {
+      return PARSER;
     }
-    
+
     // repeated .NameStringPair mapEntries = 1;
     public static final int MAPENTRIES_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> mapEntries_;
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList() {
       return mapEntries_;
     }
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
         getMapEntriesOrBuilderList() {
       return mapEntries_;
     }
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     public int getMapEntriesCount() {
       return mapEntries_.size();
     }
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) {
       return mapEntries_.get(index);
     }
+    /**
+     * <code>repeated .NameStringPair mapEntries = 1;</code>
+     *
+     * <pre>
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * </pre>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
         int index) {
       return mapEntries_.get(index);
     }
-    
+
     private void initFields() {
       mapEntries_ = java.util.Collections.emptyList();
     }
@@ -604,7 +984,7 @@ public final class RegionServerStatusPro
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       for (int i = 0; i < getMapEntriesCount(); i++) {
         if (!getMapEntries(i).isInitialized()) {
           memoizedIsInitialized = 0;
@@ -614,7 +994,7 @@ public final class RegionServerStatusPro
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -623,12 +1003,12 @@ public final class RegionServerStatusPro
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       for (int i = 0; i < mapEntries_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
@@ -638,14 +1018,14 @@ public final class RegionServerStatusPro
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -655,7 +1035,7 @@ public final class RegionServerStatusPro
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj;
-      
+
       boolean result = true;
       result = result && getMapEntriesList()
           .equals(other.getMapEntriesList());
@@ -663,9 +1043,13 @@ public final class RegionServerStatusPro
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (getMapEntriesCount() > 0) {
@@ -673,89 +1057,79 @@ public final class RegionServerStatusPro
         hash = (53 * hash) + getMapEntriesList().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code RegionServerStartupResponse}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder {
@@ -763,18 +1137,21 @@ public final class RegionServerStatusPro
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -786,7 +1163,7 @@ public final class RegionServerStatusPro
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         if (mapEntriesBuilder_ == null) {
@@ -797,20 +1174,20 @@ public final class RegionServerStatusPro
         }
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerStartupResponse_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial();
         if (!result.isInitialized()) {
@@ -818,17 +1195,7 @@ public final class RegionServerStatusPro
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this);
         int from_bitField0_ = bitField0_;
@@ -844,7 +1211,7 @@ public final class RegionServerStatusPro
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other);
@@ -853,7 +1220,7 @@ public final class RegionServerStatusPro
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this;
         if (mapEntriesBuilder_ == null) {
@@ -885,7 +1252,7 @@ public final class RegionServerStatusPro
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         for (int i = 0; i < getMapEntriesCount(); i++) {
           if (!getMapEntries(i).isInitialized()) {
@@ -895,42 +1262,26 @@ public final class RegionServerStatusPro
         }
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
-              input.readMessage(subBuilder, extensionRegistry);
-              addMapEntries(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
-      
       private int bitField0_;
-      
+
       // repeated .NameStringPair mapEntries = 1;
       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> mapEntries_ =
         java.util.Collections.emptyList();
@@ -940,10 +1291,20 @@ public final class RegionServerStatusPro
           bitField0_ |= 0x00000001;
          }
       }
-      
+
       private com.google.protobuf.RepeatedFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_;
-      
+
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getMapEntriesList() {
         if (mapEntriesBuilder_ == null) {
           return java.util.Collections.unmodifiableList(mapEntries_);
@@ -951,6 +1312,16 @@ public final class RegionServerStatusPro
           return mapEntriesBuilder_.getMessageList();
         }
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public int getMapEntriesCount() {
         if (mapEntriesBuilder_ == null) {
           return mapEntries_.size();
@@ -958,6 +1329,16 @@ public final class RegionServerStatusPro
           return mapEntriesBuilder_.getCount();
         }
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) {
         if (mapEntriesBuilder_ == null) {
           return mapEntries_.get(index);
@@ -965,6 +1346,16 @@ public final class RegionServerStatusPro
           return mapEntriesBuilder_.getMessage(index);
         }
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder setMapEntries(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (mapEntriesBuilder_ == null) {
@@ -979,6 +1370,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder setMapEntries(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (mapEntriesBuilder_ == null) {
@@ -990,6 +1391,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder addMapEntries(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (mapEntriesBuilder_ == null) {
           if (value == null) {
@@ -1003,6 +1414,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder addMapEntries(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
         if (mapEntriesBuilder_ == null) {
@@ -1017,6 +1438,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder addMapEntries(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (mapEntriesBuilder_ == null) {
@@ -1028,6 +1459,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder addMapEntries(
           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
         if (mapEntriesBuilder_ == null) {
@@ -1039,6 +1480,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder addAllMapEntries(
           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
         if (mapEntriesBuilder_ == null) {
@@ -1050,6 +1501,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder clearMapEntries() {
         if (mapEntriesBuilder_ == null) {
           mapEntries_ = java.util.Collections.emptyList();
@@ -1060,6 +1521,16 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public Builder removeMapEntries(int index) {
         if (mapEntriesBuilder_ == null) {
           ensureMapEntriesIsMutable();
@@ -1070,10 +1541,30 @@ public final class RegionServerStatusPro
         }
         return this;
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder(
           int index) {
         return getMapEntriesFieldBuilder().getBuilder(index);
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
           int index) {
         if (mapEntriesBuilder_ == null) {
@@ -1081,6 +1572,16 @@ public final class RegionServerStatusPro
           return mapEntriesBuilder_.getMessageOrBuilder(index);
         }
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> 
            getMapEntriesOrBuilderList() {
         if (mapEntriesBuilder_ != null) {
@@ -1089,15 +1590,45 @@ public final class RegionServerStatusPro
           return java.util.Collections.unmodifiableList(mapEntries_);
         }
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() {
         return getMapEntriesFieldBuilder().addBuilder(
             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder(
           int index) {
         return getMapEntriesFieldBuilder().addBuilder(
             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
       }
+      /**
+       * <code>repeated .NameStringPair mapEntries = 1;</code>
+       *
+       * <pre>
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * </pre>
+       */
       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> 
            getMapEntriesBuilderList() {
         return getMapEntriesFieldBuilder().getBuilderList();
@@ -1116,86 +1647,234 @@ public final class RegionServerStatusPro
         }
         return mapEntriesBuilder_;
       }
-      
+
       // @@protoc_insertion_point(builder_scope:RegionServerStartupResponse)
     }
-    
+
     static {
       defaultInstance = new RegionServerStartupResponse(true);
       defaultInstance.initFields();
     }
-    
+
     // @@protoc_insertion_point(class_scope:RegionServerStartupResponse)
   }
-  
+
   public interface RegionServerReportRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
-    
+
     // required .ServerName server = 1;
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     boolean hasServer();
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();
-    
+
     // optional .ServerLoad load = 2;
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     boolean hasLoad();
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad();
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder();
   }
+  /**
+   * Protobuf type {@code RegionServerReportRequest}
+   */
   public static final class RegionServerReportRequest extends
       com.google.protobuf.GeneratedMessage
       implements RegionServerReportRequestOrBuilder {
     // Use RegionServerReportRequest.newBuilder() to construct.
-    private RegionServerReportRequest(Builder builder) {
+    private RegionServerReportRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
       super(builder);
+      this.unknownFields = builder.getUnknownFields();
     }
-    private RegionServerReportRequest(boolean noInit) {}
-    
+    private RegionServerReportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
     private static final RegionServerReportRequest defaultInstance;
     public static RegionServerReportRequest getDefaultInstance() {
       return defaultInstance;
     }
-    
+
     public RegionServerReportRequest getDefaultInstanceForType() {
       return defaultInstance;
     }
-    
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RegionServerReportRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = server_.toBuilder();
+              }
+              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(server_);
+                server_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000002) == 0x00000002)) {
+                subBuilder = load_.toBuilder();
+              }
+              load_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(load_);
+                load_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000002;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor;
     }
-    
+
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable;
+      return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RegionServerReportRequest> PARSER =
+        new com.google.protobuf.AbstractParser<RegionServerReportRequest>() {
+      public RegionServerReportRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionServerReportRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionServerReportRequest> getParserForType() {
+      return PARSER;
     }
-    
+
     private int bitField0_;
     // required .ServerName server = 1;
     public static final int SERVER_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     public boolean hasServer() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
       return server_;
     }
+    /**
+     * <code>required .ServerName server = 1;</code>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
       return server_;
     }
-    
+
     // optional .ServerLoad load = 2;
     public static final int LOAD_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad load_;
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     public boolean hasLoad() {
       return ((bitField0_ & 0x00000002) == 0x00000002);
     }
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getLoad() {
       return load_;
     }
+    /**
+     * <code>optional .ServerLoad load = 2;</code>
+     *
+     * <pre>
+     ** load the server is under 
+     * </pre>
+     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getLoadOrBuilder() {
       return load_;
     }
-    
+
     private void initFields() {
       server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
       load_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
@@ -1204,7 +1883,7 @@ public final class RegionServerStatusPro
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-      
+
       if (!hasServer()) {
         memoizedIsInitialized = 0;
         return false;
@@ -1222,7 +1901,7 @@ public final class RegionServerStatusPro
       memoizedIsInitialized = 1;
       return true;
     }
-    
+
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -1234,12 +1913,12 @@ public final class RegionServerStatusPro
       }
       getUnknownFields().writeTo(output);
     }
-    
+
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-    
+
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -1253,14 +1932,14 @@ public final class RegionServerStatusPro
       memoizedSerializedSize = size;
       return size;
     }
-    
+
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-    
+
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -1270,7 +1949,7 @@ public final class RegionServerStatusPro
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj;
-      
+
       boolean result = true;
       result = result && (hasServer() == other.hasServer());
       if (hasServer()) {
@@ -1286,9 +1965,13 @@ public final class RegionServerStatusPro
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
-    
+
+    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasServer()) {
@@ -1300,89 +1983,79 @@ public final class RegionServerStatusPro
         hash = (53 * hash) + getLoad().hashCode();
       }
       hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
       return hash;
     }
-    
+
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data).buildParsed();
+      return PARSER.parseFrom(data);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      return newBuilder().mergeFrom(data, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(data, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      Builder builder = newBuilder();
-      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
-        return builder.buildParsed();
-      } else {
-        return null;
-      }
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input).buildParsed();
+      return PARSER.parseFrom(input);
     }
     public static org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return newBuilder().mergeFrom(input, extensionRegistry)
-               .buildParsed();
+      return PARSER.parseFrom(input, extensionRegistry);
     }
-    
+
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
-    
+
     @java.lang.Override
     protected Builder newBuilderForType(
         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
+    /**
+     * Protobuf type {@code RegionServerReportRequest}
+     */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder {
@@ -1390,18 +2063,21 @@ public final class RegionServerStatusPro
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor;
       }
-      
+
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable;
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
       }
-      
+
       // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
-      
-      private Builder(BuilderParent parent) {
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -1414,7 +2090,7 @@ public final class RegionServerStatusPro
       private static Builder create() {
         return new Builder();
       }
-      
+
       public Builder clear() {
         super.clear();
         if (serverBuilder_ == null) {
@@ -1431,20 +2107,20 @@ public final class RegionServerStatusPro
         bitField0_ = (bitField0_ & ~0x00000002);
         return this;
       }
-      
+
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
-      
+
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDescriptor();
+        return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.internal_static_RegionServerReportRequest_descriptor;
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance();
       }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest build() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial();
         if (!result.isInitialized()) {
@@ -1452,17 +2128,7 @@ public final class RegionServerStatusPro
         }
         return result;
       }
-      
-      private org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildParsed()
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(
-            result).asInvalidProtocolBufferException();
-        }
-        return result;
-      }
-      
+
       public org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest(this);
         int from_bitField0_ = bitField0_;
@@ -1487,7 +2153,7 @@ public final class RegionServerStatusPro
         onBuilt();
         return result;
       }
-      
+
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)other);
@@ -1496,7 +2162,7 @@ public final class RegionServerStatusPro
           return this;
         }
       }
-      
+
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.getDefaultInstance()) return this;
         if (other.hasServer()) {
@@ -1508,7 +2174,7 @@ public final class RegionServerStatusPro
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
-      
+
       public final boolean isInitialized() {
         if (!hasServer()) {
           
@@ -1526,61 +2192,39 @@ public final class RegionServerStatusPro
         }
         return true;
       }
-      
+
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder(
-            this.getUnknownFields());
-        while (true) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              this.setUnknownFields(unknownFields.build());
-              onChanged();
-              return this;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                this.setUnknownFields(unknownFields.build());
-                onChanged();
-                return this;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
-              if (hasServer()) {
-                subBuilder.mergeFrom(getServer());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setServer(subBuilder.buildPartial());
-              break;
-            }
-            case 18: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder();
-              if (hasLoad()) {
-                subBuilder.mergeFrom(getLoad());
-              }
-              input.readMessage(subBuilder, extensionRegistry);
-              setLoad(subBuilder.buildPartial());
-              break;
-            }
+        org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
           }
         }
+        return this;
       }
-      
       private int bitField0_;
-      
+
       // required .ServerName server = 1;
       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
+      /**
+       * <code>required .ServerName server = 1;</code>
+       */
       public boolean hasServer() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
+      /**
+       * <code>required .ServerName server = 1;</code>
+       */
       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
         if (serverBuilder_ == null) {
           return server_;
@@ -1588,6 +2232,9 @@ public final class RegionServerStatusPro
           return serverBuilder_.getMessage();
         }
       }
+      /**
+       * <code>required .ServerName server = 1;</code>
+       */
       public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
         if (serverBuilder_ == null) {
           if (value == null) {
@@ -1601,6 +2248,9 @@ public final class RegionServerStatusPro
         bitField0_ |= 0x00000001;
         return this;
       }
+      /**
+       * <code>required .ServerName server = 1;</code>
+       */
       public Builder setServer(
           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
         if (serverBuilder_ == null) {
@@ -1612,6 +2262,9 @@ public final class RegionServerStatusPro
         bitField0_ |= 0x00000001;
         return this;
       }
+      /**
+       * <code>required .ServerName server = 1;</code>
+       */
       public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
         if (serverBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&

[... 3508 lines stripped ...]
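
Editor's note, not part of the commit: the hunks above are mechanical regeneration. Each message class gains a public static PARSER and a parsing constructor that reads directly from a CodedInputStream, the static parseFrom/parseDelimitedFrom overloads now delegate to PARSER instead of the old newBuilder().mergeFrom(...).buildParsed() path, and hashCode() results are memoized. A minimal sketch of how a caller round-trips the regenerated RegionServerReportRequest follows; the ServerName setter names and the literal values are assumptions for illustration, since that message's definition is outside this diff.

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName;
    import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;

    public class ReportRequestRoundTrip {
      public static void main(String[] args) throws Exception {
        // Assumed ServerName accessors (host name / port / start code); adjust
        // to the actual generated setters if the .proto field names differ.
        ServerName server = ServerName.newBuilder()
            .setHostName("rs1.example.org")
            .setPort(60020)
            .setStartCode(System.currentTimeMillis())
            .build();

        RegionServerReportRequest request = RegionServerReportRequest.newBuilder()
            .setServer(server)        // required .ServerName server = 1
            .build();                 // optional .ServerLoad load = 2 left unset

        byte[] wire = request.toByteArray();

        // The regenerated static parseFrom(...) delegates to the new PARSER
        // field; callers can also invoke the parser directly.
        RegionServerReportRequest viaStatic = RegionServerReportRequest.parseFrom(wire);
        RegionServerReportRequest viaParser = RegionServerReportRequest.PARSER.parseFrom(wire);

        System.out.println(viaStatic.getServer().getHostName());
        System.out.println(viaParser.hasLoad());   // false: optional load not set
      }
    }

Either path yields the same message; the behavioral difference introduced by this regeneration is internal (unknown fields are captured during parsing and malformed input surfaces as InvalidProtocolBufferException carrying the partially built message).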