Posted to commits@hbase.apache.org by st...@apache.org on 2016/09/29 19:37:21 UTC

[07/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb: Shade our protobufs. Do it in a manner that makes it so we can still have references to com.google.protobuf in our API (and in REST). The c.g.p in API is for
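
For orientation before the generated code below, here is a minimal sketch of how a caller might use these shaded, generated classes once hbase-protocol-shaded is on the classpath. Only the UserInformation accessors and the ConnectionHeader getters/parsers are visible in this partial diff; the ConnectionHeader builder setters are assumed to follow the same generated-builder pattern, and the "ClientService" name is purely illustrative.

    import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;

    public class ShadedRpcProtosSketch {
      public static void main(String[] args) throws Exception {
        // Build the UserInformation message from RPC.proto
        // (effective_user is required, real_user is optional).
        RPCProtos.UserInformation user = RPCProtos.UserInformation.newBuilder()
            .setEffectiveUser("alice")
            .setRealUser("bob")
            .build();

        // Embed it in the ConnectionHeader sent on connection setup.
        // setUserInfo/setServiceName assumed per the standard generated pattern.
        RPCProtos.ConnectionHeader header = RPCProtos.ConnectionHeader.newBuilder()
            .setUserInfo(user)
            .setServiceName("ClientService") // illustrative service name
            .build();

        // Round-trip through the wire format, as the RPC layer would.
        byte[] wire = header.toByteArray();
        RPCProtos.ConnectionHeader parsed =
            RPCProtos.ConnectionHeader.parseFrom(wire);
        System.out.println(parsed.getUserInfo().getEffectiveUser()); // "alice"
      }
    }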

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
new file mode 100644
index 0000000..222f383
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java
@@ -0,0 +1,6299 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: RPC.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class RPCProtos {
+  private RPCProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface UserInformationOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string effective_user = 1;
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    boolean hasEffectiveUser();
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    java.lang.String getEffectiveUser();
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    com.google.protobuf.ByteString
+        getEffectiveUserBytes();
+
+    // optional string real_user = 2;
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    boolean hasRealUser();
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    java.lang.String getRealUser();
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getRealUserBytes();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.UserInformation}
+   *
+   * <pre>
+   * User Information proto.  Included in ConnectionHeader on connection setup
+   * </pre>
+   */
+  public static final class UserInformation extends
+      com.google.protobuf.GeneratedMessage
+      implements UserInformationOrBuilder {
+    // Use UserInformation.newBuilder() to construct.
+    private UserInformation(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private UserInformation(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final UserInformation defaultInstance;
+    public static UserInformation getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public UserInformation getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UserInformation(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              effectiveUser_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              realUser_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UserInformation> PARSER =
+        new com.google.protobuf.AbstractParser<UserInformation>() {
+      public UserInformation parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UserInformation(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UserInformation> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required string effective_user = 1;
+    public static final int EFFECTIVE_USER_FIELD_NUMBER = 1;
+    private java.lang.Object effectiveUser_;
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    public boolean hasEffectiveUser() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    public java.lang.String getEffectiveUser() {
+      java.lang.Object ref = effectiveUser_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          effectiveUser_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string effective_user = 1;</code>
+     */
+    public com.google.protobuf.ByteString
+        getEffectiveUserBytes() {
+      java.lang.Object ref = effectiveUser_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        effectiveUser_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional string real_user = 2;
+    public static final int REAL_USER_FIELD_NUMBER = 2;
+    private java.lang.Object realUser_;
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    public boolean hasRealUser() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    public java.lang.String getRealUser() {
+      java.lang.Object ref = realUser_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          realUser_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string real_user = 2;</code>
+     */
+    public com.google.protobuf.ByteString
+        getRealUserBytes() {
+      java.lang.Object ref = realUser_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        realUser_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      effectiveUser_ = "";
+      realUser_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasEffectiveUser()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getEffectiveUserBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getRealUserBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getEffectiveUserBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getRealUserBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) obj;
+
+      boolean result = true;
+      result = result && (hasEffectiveUser() == other.hasEffectiveUser());
+      if (hasEffectiveUser()) {
+        result = result && getEffectiveUser()
+            .equals(other.getEffectiveUser());
+      }
+      result = result && (hasRealUser() == other.hasRealUser());
+      if (hasRealUser()) {
+        result = result && getRealUser()
+            .equals(other.getRealUser());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasEffectiveUser()) {
+        hash = (37 * hash) + EFFECTIVE_USER_FIELD_NUMBER;
+        hash = (53 * hash) + getEffectiveUser().hashCode();
+      }
+      if (hasRealUser()) {
+        hash = (37 * hash) + REAL_USER_FIELD_NUMBER;
+        hash = (53 * hash) + getRealUser().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.UserInformation}
+     *
+     * <pre>
+     * User Information proto.  Included in ConnectionHeader on connection setup
+     * </pre>
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        effectiveUser_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        realUser_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_UserInformation_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation build() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation buildPartial() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.effectiveUser_ = effectiveUser_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.realUser_ = realUser_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation other) {
+        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) return this;
+        if (other.hasEffectiveUser()) {
+          bitField0_ |= 0x00000001;
+          effectiveUser_ = other.effectiveUser_;
+          onChanged();
+        }
+        if (other.hasRealUser()) {
+          bitField0_ |= 0x00000002;
+          realUser_ = other.realUser_;
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasEffectiveUser()) {
+          
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required string effective_user = 1;
+      private java.lang.Object effectiveUser_ = "";
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public boolean hasEffectiveUser() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public java.lang.String getEffectiveUser() {
+        java.lang.Object ref = effectiveUser_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          effectiveUser_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getEffectiveUserBytes() {
+        java.lang.Object ref = effectiveUser_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          effectiveUser_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public Builder setEffectiveUser(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        effectiveUser_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public Builder clearEffectiveUser() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        effectiveUser_ = getDefaultInstance().getEffectiveUser();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required string effective_user = 1;</code>
+       */
+      public Builder setEffectiveUserBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        effectiveUser_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional string real_user = 2;
+      private java.lang.Object realUser_ = "";
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public boolean hasRealUser() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public java.lang.String getRealUser() {
+        java.lang.Object ref = realUser_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          realUser_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public com.google.protobuf.ByteString
+          getRealUserBytes() {
+        java.lang.Object ref = realUser_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          realUser_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public Builder setRealUser(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        realUser_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public Builder clearRealUser() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        realUser_ = getDefaultInstance().getRealUser();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string real_user = 2;</code>
+       */
+      public Builder setRealUserBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        realUser_ = value;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.UserInformation)
+    }
+
+    static {
+      defaultInstance = new UserInformation(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.UserInformation)
+  }
+
+  public interface ConnectionHeaderOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional .hbase.pb.UserInformation user_info = 1;
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    boolean hasUserInfo();
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo();
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder();
+
+    // optional string service_name = 2;
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    boolean hasServiceName();
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    java.lang.String getServiceName();
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getServiceNameBytes();
+
+    // optional string cell_block_codec_class = 3;
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    boolean hasCellBlockCodecClass();
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    java.lang.String getCellBlockCodecClass();
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    com.google.protobuf.ByteString
+        getCellBlockCodecClassBytes();
+
+    // optional string cell_block_compressor_class = 4;
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    boolean hasCellBlockCompressorClass();
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    java.lang.String getCellBlockCompressorClass();
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    com.google.protobuf.ByteString
+        getCellBlockCompressorClassBytes();
+
+    // optional .hbase.pb.VersionInfo version_info = 5;
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    boolean hasVersionInfo();
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo();
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ConnectionHeader}
+   *
+   * <pre>
+   * This is sent on connection setup after the connection preamble is sent.
+   * </pre>
+   */
+  public static final class ConnectionHeader extends
+      com.google.protobuf.GeneratedMessage
+      implements ConnectionHeaderOrBuilder {
+    // Use ConnectionHeader.newBuilder() to construct.
+    private ConnectionHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private ConnectionHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final ConnectionHeader defaultInstance;
+    public static ConnectionHeader getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public ConnectionHeader getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ConnectionHeader(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = userInfo_.toBuilder();
+              }
+              userInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(userInfo_);
+                userInfo_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              serviceName_ = input.readBytes();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              cellBlockCodecClass_ = input.readBytes();
+              break;
+            }
+            case 34: {
+              bitField0_ |= 0x00000008;
+              cellBlockCompressorClass_ = input.readBytes();
+              break;
+            }
+            case 42: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000010) == 0x00000010)) {
+                subBuilder = versionInfo_.toBuilder();
+              }
+              versionInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(versionInfo_);
+                versionInfo_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000010;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ConnectionHeader> PARSER =
+        new com.google.protobuf.AbstractParser<ConnectionHeader>() {
+      public ConnectionHeader parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ConnectionHeader(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ConnectionHeader> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // optional .hbase.pb.UserInformation user_info = 1;
+    public static final int USER_INFO_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_;
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    public boolean hasUserInfo() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+      return userInfo_;
+    }
+    /**
+     * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+      return userInfo_;
+    }
+
+    // optional string service_name = 2;
+    public static final int SERVICE_NAME_FIELD_NUMBER = 2;
+    private java.lang.Object serviceName_;
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    public boolean hasServiceName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    public java.lang.String getServiceName() {
+      java.lang.Object ref = serviceName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          serviceName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string service_name = 2;</code>
+     */
+    public com.google.protobuf.ByteString
+        getServiceNameBytes() {
+      java.lang.Object ref = serviceName_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        serviceName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional string cell_block_codec_class = 3;
+    public static final int CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER = 3;
+    private java.lang.Object cellBlockCodecClass_;
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    public boolean hasCellBlockCodecClass() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    public java.lang.String getCellBlockCodecClass() {
+      java.lang.Object ref = cellBlockCodecClass_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          cellBlockCodecClass_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string cell_block_codec_class = 3;</code>
+     *
+     * <pre>
+     * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+     * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+     * </pre>
+     */
+    public com.google.protobuf.ByteString
+        getCellBlockCodecClassBytes() {
+      java.lang.Object ref = cellBlockCodecClass_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        cellBlockCodecClass_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional string cell_block_compressor_class = 4;
+    public static final int CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER = 4;
+    private java.lang.Object cellBlockCompressorClass_;
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    public boolean hasCellBlockCompressorClass() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    public java.lang.String getCellBlockCompressorClass() {
+      java.lang.Object ref = cellBlockCompressorClass_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          cellBlockCompressorClass_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string cell_block_compressor_class = 4;</code>
+     *
+     * <pre>
+     * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+     * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+     * </pre>
+     */
+    public com.google.protobuf.ByteString
+        getCellBlockCompressorClassBytes() {
+      java.lang.Object ref = cellBlockCompressorClass_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        cellBlockCompressorClass_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional .hbase.pb.VersionInfo version_info = 5;
+    public static final int VERSION_INFO_FIELD_NUMBER = 5;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_;
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    public boolean hasVersionInfo() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() {
+      return versionInfo_;
+    }
+    /**
+     * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() {
+      return versionInfo_;
+    }
+
+    private void initFields() {
+      userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+      serviceName_ = "";
+      cellBlockCodecClass_ = "";
+      cellBlockCompressorClass_ = "";
+      versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (hasUserInfo()) {
+        if (!getUserInfo().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      if (hasVersionInfo()) {
+        if (!getVersionInfo().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeMessage(1, userInfo_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getServiceNameBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, getCellBlockCodecClassBytes());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(4, getCellBlockCompressorClassBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeMessage(5, versionInfo_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, userInfo_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getServiceNameBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, getCellBlockCodecClassBytes());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(4, getCellBlockCompressorClassBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(5, versionInfo_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) obj;
+
+      boolean result = true;
+      result = result && (hasUserInfo() == other.hasUserInfo());
+      if (hasUserInfo()) {
+        result = result && getUserInfo()
+            .equals(other.getUserInfo());
+      }
+      result = result && (hasServiceName() == other.hasServiceName());
+      if (hasServiceName()) {
+        result = result && getServiceName()
+            .equals(other.getServiceName());
+      }
+      result = result && (hasCellBlockCodecClass() == other.hasCellBlockCodecClass());
+      if (hasCellBlockCodecClass()) {
+        result = result && getCellBlockCodecClass()
+            .equals(other.getCellBlockCodecClass());
+      }
+      result = result && (hasCellBlockCompressorClass() == other.hasCellBlockCompressorClass());
+      if (hasCellBlockCompressorClass()) {
+        result = result && getCellBlockCompressorClass()
+            .equals(other.getCellBlockCompressorClass());
+      }
+      result = result && (hasVersionInfo() == other.hasVersionInfo());
+      if (hasVersionInfo()) {
+        result = result && getVersionInfo()
+            .equals(other.getVersionInfo());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasUserInfo()) {
+        hash = (37 * hash) + USER_INFO_FIELD_NUMBER;
+        hash = (53 * hash) + getUserInfo().hashCode();
+      }
+      if (hasServiceName()) {
+        hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getServiceName().hashCode();
+      }
+      if (hasCellBlockCodecClass()) {
+        hash = (37 * hash) + CELL_BLOCK_CODEC_CLASS_FIELD_NUMBER;
+        hash = (53 * hash) + getCellBlockCodecClass().hashCode();
+      }
+      if (hasCellBlockCompressorClass()) {
+        hash = (37 * hash) + CELL_BLOCK_COMPRESSOR_CLASS_FIELD_NUMBER;
+        hash = (53 * hash) + getCellBlockCompressorClass().hashCode();
+      }
+      if (hasVersionInfo()) {
+        hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER;
+        hash = (53 * hash) + getVersionInfo().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.ConnectionHeader}
+     *
+     * <pre>
+     * This is sent on connection setup after the connection preamble is sent.
+     * </pre>
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getUserInfoFieldBuilder();
+          getVersionInfoFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (userInfoBuilder_ == null) {
+          userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+        } else {
+          userInfoBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        serviceName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        cellBlockCodecClass_ = "";
+        bitField0_ = (bitField0_ & ~0x00000004);
+        cellBlockCompressorClass_ = "";
+        bitField0_ = (bitField0_ & ~0x00000008);
+        if (versionInfoBuilder_ == null) {
+          versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance();
+        } else {
+          versionInfoBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000010);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeader_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader build() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader buildPartial() {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        if (userInfoBuilder_ == null) {
+          result.userInfo_ = userInfo_;
+        } else {
+          result.userInfo_ = userInfoBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.serviceName_ = serviceName_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.cellBlockCodecClass_ = cellBlockCodecClass_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.cellBlockCompressorClass_ = cellBlockCompressorClass_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        if (versionInfoBuilder_ == null) {
+          result.versionInfo_ = versionInfo_;
+        } else {
+          result.versionInfo_ = versionInfoBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) {
+          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader other) {
+        if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader.getDefaultInstance()) return this;
+        if (other.hasUserInfo()) {
+          mergeUserInfo(other.getUserInfo());
+        }
+        if (other.hasServiceName()) {
+          bitField0_ |= 0x00000002;
+          serviceName_ = other.serviceName_;
+          onChanged();
+        }
+        if (other.hasCellBlockCodecClass()) {
+          bitField0_ |= 0x00000004;
+          cellBlockCodecClass_ = other.cellBlockCodecClass_;
+          onChanged();
+        }
+        if (other.hasCellBlockCompressorClass()) {
+          bitField0_ |= 0x00000008;
+          cellBlockCompressorClass_ = other.cellBlockCompressorClass_;
+          onChanged();
+        }
+        if (other.hasVersionInfo()) {
+          mergeVersionInfo(other.getVersionInfo());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        // Both nested messages are optional, but when present they must be
+        // internally complete (UserInformation declares a required field).
+        if (hasUserInfo()) {
+          if (!getUserInfo().isInitialized()) {
+            return false;
+          }
+        }
+        if (hasVersionInfo()) {
+          if (!getVersionInfo().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
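+
+      // Editorial sketch (not part of the generated file): how the check above
+      // surfaces to callers. UserInformation declares a required effective_user,
+      // so a ConnectionHeader carrying an incomplete UserInformation fails at
+      // build():
+      //
+      //   RPCProtos.ConnectionHeader.newBuilder()
+      //       .setUserInfo(RPCProtos.UserInformation.newBuilder().buildPartial())
+      //       .build();  // throws UninitializedMessageException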
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional .hbase.pb.UserInformation user_info = 1;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_;
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public boolean hasUserInfo() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+        if (userInfoBuilder_ == null) {
+          return userInfo_;
+        } else {
+          return userInfoBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public Builder setUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) {
+        if (userInfoBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          userInfo_ = value;
+          onChanged();
+        } else {
+          userInfoBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public Builder setUserInfo(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) {
+        if (userInfoBuilder_ == null) {
+          userInfo_ = builderForValue.build();
+          onChanged();
+        } else {
+          userInfoBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public Builder mergeUserInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation value) {
+        if (userInfoBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              userInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) {
+            userInfo_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial();
+          } else {
+            userInfo_ = value;
+          }
+          onChanged();
+        } else {
+          userInfoBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000001;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public Builder clearUserInfo() {
+        if (userInfoBuilder_ == null) {
+          userInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+          onChanged();
+        } else {
+          userInfoBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() {
+        bitField0_ |= 0x00000001;
+        onChanged();
+        return getUserInfoFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+        if (userInfoBuilder_ != null) {
+          return userInfoBuilder_.getMessageOrBuilder();
+        } else {
+          return userInfo_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.UserInformation user_info = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder> 
+          getUserInfoFieldBuilder() {
+        if (userInfoBuilder_ == null) {
+          userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformationOrBuilder>(
+                  userInfo_,
+                  getParentForChildren(),
+                  isClean());
+          userInfo_ = null;
+        }
+        return userInfoBuilder_;
+      }
+
+      // optional string service_name = 2;
+      private java.lang.Object serviceName_ = "";
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public boolean hasServiceName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public java.lang.String getServiceName() {
+        java.lang.Object ref = serviceName_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          serviceName_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public com.google.protobuf.ByteString
+          getServiceNameBytes() {
+        java.lang.Object ref = serviceName_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          serviceName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public Builder setServiceName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        serviceName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public Builder clearServiceName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        serviceName_ = getDefaultInstance().getServiceName();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string service_name = 2;</code>
+       */
+      public Builder setServiceNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        serviceName_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional string cell_block_codec_class = 3;
+      private java.lang.Object cellBlockCodecClass_ = "";
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public boolean hasCellBlockCodecClass() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public java.lang.String getCellBlockCodecClass() {
+        java.lang.Object ref = cellBlockCodecClass_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          cellBlockCodecClass_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getCellBlockCodecClassBytes() {
+        java.lang.Object ref = cellBlockCodecClass_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          cellBlockCodecClass_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public Builder setCellBlockCodecClass(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        cellBlockCodecClass_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public Builder clearCellBlockCodecClass() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        cellBlockCodecClass_ = getDefaultInstance().getCellBlockCodecClass();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string cell_block_codec_class = 3;</code>
+       *
+       * <pre>
+       * Cell block codec we will use sending over optional cell blocks.  Server throws exception
+       * if cannot deal.  Null means no codec'ing going on so we are pb all the time (SLOW!!!)
+       * </pre>
+       */
+      public Builder setCellBlockCodecClassBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        cellBlockCodecClass_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional string cell_block_compressor_class = 4;
+      private java.lang.Object cellBlockCompressorClass_ = "";
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public boolean hasCellBlockCompressorClass() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public java.lang.String getCellBlockCompressorClass() {
+        java.lang.Object ref = cellBlockCompressorClass_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          cellBlockCompressorClass_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getCellBlockCompressorClassBytes() {
+        java.lang.Object ref = cellBlockCompressorClass_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          cellBlockCompressorClass_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public Builder setCellBlockCompressorClass(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000008;
+        cellBlockCompressorClass_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public Builder clearCellBlockCompressorClass() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        cellBlockCompressorClass_ = getDefaultInstance().getCellBlockCompressorClass();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string cell_block_compressor_class = 4;</code>
+       *
+       * <pre>
+       * Compressor we will use if cell block is compressed.  Server will throw exception if not supported.
+       * Class must implement hadoop's CompressionCodec Interface.  Can't compress if no codec.
+       * </pre>
+       */
+      public Builder setCellBlockCompressorClassBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000008;
+        cellBlockCompressorClass_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional .hbase.pb.VersionInfo version_info = 5;
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> versionInfoBuilder_;
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public boolean hasVersionInfo() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo getVersionInfo() {
+        if (versionInfoBuilder_ == null) {
+          return versionInfo_;
+        } else {
+          return versionInfoBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public Builder setVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) {
+        if (versionInfoBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          versionInfo_ = value;
+          onChanged();
+        } else {
+          versionInfoBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public Builder setVersionInfo(
+          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder builderForValue) {
+        if (versionInfoBuilder_ == null) {
+          versionInfo_ = builderForValue.build();
+          onChanged();
+        } else {
+          versionInfoBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public Builder mergeVersionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo value) {
+        if (versionInfoBuilder_ == null) {
+          if (((bitField0_ & 0x00000010) == 0x00000010) &&
+              versionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance()) {
+            versionInfo_ =
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.newBuilder(versionInfo_).mergeFrom(value).buildPartial();
+          } else {
+            versionInfo_ = value;
+          }
+          onChanged();
+        } else {
+          versionInfoBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000010;
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public Builder clearVersionInfo() {
+        if (versionInfoBuilder_ == null) {
+          versionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance();
+          onChanged();
+        } else {
+          versionInfoBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000010);
+        return this;
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder getVersionInfoBuilder() {
+        bitField0_ |= 0x00000010;
+        onChanged();
+        return getVersionInfoFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder() {
+        if (versionInfoBuilder_ != null) {
+          return versionInfoBuilder_.getMessageOrBuilder();
+        } else {
+          return versionInfo_;
+        }
+      }
+      /**
+       * <code>optional .hbase.pb.VersionInfo version_info = 5;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder> 
+          getVersionInfoFieldBuilder() {
+        if (versionInfoBuilder_ == null) {
+          versionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfoOrBuilder>(
+                  versionInfo_,
+                  getParentForChildren(),
+                  isClean());
+          versionInfo_ = null;
+        }
+        return versionInfoBuilder_;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader)
+    }
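+
+    // Editorial sketch (not part of the generated file): how a client might
+    // assemble a ConnectionHeader with the builder above. "ClientService" and
+    // the KeyValueCodec class name are illustrative values from HBase, not
+    // anything mandated by this message.
+    //
+    //   RPCProtos.ConnectionHeader header = RPCProtos.ConnectionHeader.newBuilder()
+    //       .setUserInfo(RPCProtos.UserInformation.newBuilder()
+    //           .setEffectiveUser("hbase")   // required by UserInformation
+    //           .build())
+    //       .setServiceName("ClientService")
+    //       .setCellBlockCodecClass("org.apache.hadoop.hbase.codec.KeyValueCodec")
+    //       .build();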
+
+    static {
+      defaultInstance = new ConnectionHeader(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader)
+  }
+
+  public interface CellBlockMetaOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional uint32 length = 1;
+    /**
+     * <code>optional uint32 length = 1;</code>
+     *
+     * <pre>
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * </pre>
+     */
+    boolean hasLength();
+    /**
+     * <code>optional uint32 length = 1;</code>
+     *
+     * <pre>
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * </pre>
+     */
+    int getLength();
+  }
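+
+  // Editorial sketch (not part of the generated file): the length field records
+  // the byte count of the cell block that follows the RequestHeader on the wire,
+  // so a sender can fill it straight from its buffer. The cellBlock array below
+  // is a hypothetical stand-in for already-encoded cells:
+  //
+  //   byte[] cellBlock = new byte[300];  // stand-in for encoded cells
+  //   RPCProtos.CellBlockMeta meta = RPCProtos.CellBlockMeta.newBuilder()
+  //       .setLength(cellBlock.length)
+  //       .build();
+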
+  /**
+   * Protobuf type {@code hbase.pb.CellBlockMeta}
+   *
+   * <pre>
+   * Optional Cell block Message.  Included in client RequestHeader
+   * </pre>
+   */
+  public static final class CellBlockMeta extends
+      com.google.protobuf.GeneratedMessage
+      implements CellBlockMetaOrBuilder {
+    // Use CellBlockMeta.newBuilder() to construct.
+    private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final CellBlockMeta defaultInstance;
+    public static CellBlockMeta getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CellBlockMeta getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private CellBlockMeta(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 8: {
+              bitField0_ |= 0x00000001;
+              length_ = input.readUInt32();
+              break;
+            }
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
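+    // Editorial note: the case label above follows the protobuf wire format,
+    // where a tag is (field_number << 3) | wire_type. For the uint32 "length"
+    // field (field 1, varint wire type 0) that gives (1 << 3) | 0 = 8, which is
+    // why a tag of 8 routes to the readUInt32 branch.
+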
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<CellBlockMeta> PARSER =
+        new com.google.protobuf.AbstractParser<CellBlockMeta>() {
+      public CellBlockMeta parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new CellBlockMeta(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<CellBlockMeta> getParserForType() {
+      return PARSER;
+    }
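+
+    // Editorial sketch (not part of the generated file): PARSER is what the RPC
+    // layer would use to decode a received CellBlockMeta. The byte array below
+    // is a hypothetical stand-in for bytes read off the wire:
+    //
+    //   byte[] wire = new byte[] { 0x08, (byte) 0xAC, 0x02 };  // length = 300
+    //   RPCProtos.CellBlockMeta meta =
+    //       RPCProtos.CellBlockMeta.PARSER.parseFrom(wire);
+    //   assert meta.getLength() == 300;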
+
+    private int bitField0_;
+    // optional uint32 length = 1;
+    public static final int LENGTH_FIELD_NUMBER = 1;
+    private int length_;
+    /**
+     * <code>optional uint32 length = 1;</code>
+     *
+     * <pre>
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * </pre>
+     */
+    public boolean hasLength() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>optional uint32 length = 1;</code>
+     *
+     * <pre>
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * </pre>
+     */
+    public int getLength() {
+      return length_;
+    }
+
+    private void initFields() {
+      length_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeUInt32(1, length_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(1, length_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
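+
+    // Editorial note: computeUInt32Size covers the 1-byte tag plus the varint
+    // encoding of the value. For example, length_ = 300 costs 1 byte for the
+    // tag (0x08) plus 2 bytes for the varint (0xAC 0x02), so the whole message
+    // serializes to 3 bytes when no unknown fields are present.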
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta) obj;
+
+      boolean result = true;
+      result = result && (hasLength() == other.hasLength());
+      if (hasLength()) {
+        result = result && (getLength()
+            == other.getLength());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasLength()) {
+        hash = (37 * hash) + LENGTH_FIELD_NUMBER;
+        hash = (53 * hash) + getLength();
+      }
+      hash = (29 * 

<TRUNCATED>