Posted to commits@hbase.apache.org by st...@apache.org on 2016/10/01 18:14:52 UTC

[05/35] hbase git commit: Forgot to add shaded google

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java
new file mode 100644
index 0000000..716cd7d
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java
@@ -0,0 +1,451 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * Wrapper message for `uint32`.
+ * The JSON representation for `UInt32Value` is JSON number.
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.UInt32Value}
+ */
+public  final class UInt32Value extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.UInt32Value)
+    UInt32ValueOrBuilder {
+  // Use UInt32Value.newBuilder() to construct.
+  private UInt32Value(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private UInt32Value() {
+    value_ = 0;
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private UInt32Value(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 8: {
+
+            value_ = input.readUInt32();
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt32Value_descriptor;
+  }
+
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt32Value_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.Builder.class);
+  }
+
+  public static final int VALUE_FIELD_NUMBER = 1;
+  private int value_;
+  /**
+   * <pre>
+   * The uint32 value.
+   * </pre>
+   *
+   * <code>optional uint32 value = 1;</code>
+   */
+  public int getValue() {
+    return value_;
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    if (value_ != 0) {
+      output.writeUInt32(1, value_);
+    }
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    if (value_ != 0) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+        .computeUInt32Size(1, value_);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value) obj;
+
+    boolean result = true;
+    result = result && (getValue()
+        == other.getValue());
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptorForType().hashCode();
+    hash = (37 * hash) + VALUE_FIELD_NUMBER;
+    hash = (53 * hash) + getValue();
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * Wrapper message for `uint32`.
+   * The JSON representation for `UInt32Value` is JSON number.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.UInt32Value}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.UInt32Value)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32ValueOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt32Value_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt32Value_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      value_ = 0;
+
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt32Value_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value(this);
+      result.value_ = value_;
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value.getDefaultInstance()) return this;
+      if (other.getValue() != 0) {
+        setValue(other.getValue());
+      }
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private int value_ ;
+    /**
+     * <pre>
+     * The uint32 value.
+     * </pre>
+     *
+     * <code>optional uint32 value = 1;</code>
+     */
+    public int getValue() {
+      return value_;
+    }
+    /**
+     * <pre>
+     * The uint32 value.
+     * </pre>
+     *
+     * <code>optional uint32 value = 1;</code>
+     */
+    public Builder setValue(int value) {
+      
+      value_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * The uint32 value.
+     * </pre>
+     *
+     * <code>optional uint32 value = 1;</code>
+     */
+    public Builder clearValue() {
+      
+      value_ = 0;
+      onChanged();
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.UInt32Value)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.UInt32Value)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt32Value>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<UInt32Value>() {
+    public UInt32Value parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new UInt32Value(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt32Value> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt32Value> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
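
[Annotation] A minimal round-trip sketch of the shaded UInt32Value API added above, using only methods visible in this diff (the class name UInt32ValueRoundTrip is illustrative, not part of the commit):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value;

    public class UInt32ValueRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a wrapper around a plain uint32 value.
        UInt32Value original = UInt32Value.newBuilder().setValue(42).build();

        // Serialize to the binary wire format...
        byte[] wire = original.toByteArray();

        // ...and parse it back with the static parseFrom(byte[]) overload above.
        UInt32Value parsed = UInt32Value.parseFrom(wire);

        // equals() compares the wrapped value, as defined in the generated class.
        assert parsed.equals(original) && parsed.getValue() == 42;
      }
    }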

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32ValueOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32ValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32ValueOrBuilder.java
new file mode 100644
index 0000000..d52f0e0
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32ValueOrBuilder.java
@@ -0,0 +1,18 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface UInt32ValueOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.UInt32Value)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * The uint32 value.
+   * </pre>
+   *
+   * <code>optional uint32 value = 1;</code>
+   */
+  int getValue();
+}
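
[Annotation] The OrBuilder interface added below UInt32Value is what lets a method accept either a built UInt32Value or an in-progress UInt32Value.Builder, since both implement it. A small sketch (the doubled helper is illustrative, not part of this commit):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32ValueOrBuilder;

    class OrBuilderDemo {
      // Works for a message or a builder interchangeably; both expose getValue().
      static long doubled(UInt32ValueOrBuilder v) {
        return 2L * v.getValue();
      }

      public static void main(String[] args) {
        UInt32Value msg = UInt32Value.newBuilder().setValue(21).build();
        UInt32Value.Builder builder = UInt32Value.newBuilder().setValue(21);
        assert doubled(msg) == 42 && doubled(builder) == 42;
      }
    }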

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java
new file mode 100644
index 0000000..7ed4029
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java
@@ -0,0 +1,452 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * Wrapper message for `uint64`.
+ * The JSON representation for `UInt64Value` is JSON string.
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.UInt64Value}
+ */
+public  final class UInt64Value extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.UInt64Value)
+    UInt64ValueOrBuilder {
+  // Use UInt64Value.newBuilder() to construct.
+  private UInt64Value(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private UInt64Value() {
+    value_ = 0L;
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private UInt64Value(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 8: {
+
+            value_ = input.readUInt64();
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt64Value_descriptor;
+  }
+
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt64Value_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.Builder.class);
+  }
+
+  public static final int VALUE_FIELD_NUMBER = 1;
+  private long value_;
+  /**
+   * <pre>
+   * The uint64 value.
+   * </pre>
+   *
+   * <code>optional uint64 value = 1;</code>
+   */
+  public long getValue() {
+    return value_;
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    if (value_ != 0L) {
+      output.writeUInt64(1, value_);
+    }
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    if (value_ != 0L) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+        .computeUInt64Size(1, value_);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value) obj;
+
+    boolean result = true;
+    result = result && (getValue()
+        == other.getValue());
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptorForType().hashCode();
+    hash = (37 * hash) + VALUE_FIELD_NUMBER;
+    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+        getValue());
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * Wrapper message for `uint64`.
+   * The JSON representation for `UInt64Value` is JSON string.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.UInt64Value}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.UInt64Value)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64ValueOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt64Value_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt64Value_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      value_ = 0L;
+
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_UInt64Value_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value(this);
+      result.value_ = value_;
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value.getDefaultInstance()) return this;
+      if (other.getValue() != 0L) {
+        setValue(other.getValue());
+      }
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private long value_ ;
+    /**
+     * <pre>
+     * The uint64 value.
+     * </pre>
+     *
+     * <code>optional uint64 value = 1;</code>
+     */
+    public long getValue() {
+      return value_;
+    }
+    /**
+     * <pre>
+     * The uint64 value.
+     * </pre>
+     *
+     * <code>optional uint64 value = 1;</code>
+     */
+    public Builder setValue(long value) {
+      
+      value_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * The uint64 value.
+     * </pre>
+     *
+     * <code>optional uint64 value = 1;</code>
+     */
+    public Builder clearValue() {
+      
+      value_ = 0L;
+      onChanged();
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.UInt64Value)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.UInt64Value)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt64Value>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<UInt64Value>() {
+    public UInt64Value parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new UInt64Value(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt64Value> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<UInt64Value> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
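
[Annotation] Both wrapper classes also expose parseDelimitedFrom, which pairs with writeDelimitedTo (inherited from the shaded AbstractMessageLite, not shown in this hunk) to pack several size-prefixed messages into one stream. A hedged sketch:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value;

    class DelimitedStreamDemo {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // writeDelimitedTo prefixes each message with its varint-encoded size,
        // so several messages can share one stream.
        for (long v : new long[] {1L, 2L, 3L}) {
          UInt64Value.newBuilder().setValue(v).build().writeDelimitedTo(out);
        }

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        UInt64Value msg;
        long sum = 0;
        // parseDelimitedFrom reads one size-prefixed message at a time and
        // returns null once the stream is exhausted.
        while ((msg = UInt64Value.parseDelimitedFrom(in)) != null) {
          sum += msg.getValue();
        }
        assert sum == 6L;
      }
    }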

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64ValueOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64ValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64ValueOrBuilder.java
new file mode 100644
index 0000000..bbdc59a
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64ValueOrBuilder.java
@@ -0,0 +1,18 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface UInt64ValueOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.UInt64Value)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * The uint64 value.
+   * </pre>
+   *
+   * <code>optional uint64 value = 1;</code>
+   */
+  long getValue();
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UninitializedMessageException.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UninitializedMessageException.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UninitializedMessageException.java
new file mode 100644
index 0000000..ede9d0d
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UninitializedMessageException.java
@@ -0,0 +1,99 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Thrown when attempting to build a protocol message that is missing required
+ * fields.  This is a {@code RuntimeException} because it normally represents
+ * a programming error:  it happens when some code which constructs a message
+ * fails to set all the fields.  {@code parseFrom()} methods <b>do not</b>
+ * throw this; they throw an {@link InvalidProtocolBufferException} if
+ * required fields are missing, because it is not a programming error to
+ * receive an incomplete message.  In other words,
+ * {@code UninitializedMessageException} should never be thrown by correct
+ * code, but {@code InvalidProtocolBufferException} might be.
+ *
+ * @author kenton@google.com Kenton Varda
+ */
+public class UninitializedMessageException extends RuntimeException {
+  private static final long serialVersionUID = -7466929953374883507L;
+
+  public UninitializedMessageException(final MessageLite message) {
+    super("Message was missing required fields.  (Lite runtime could not " +
+          "determine which fields were missing).");
+    missingFields = null;
+  }
+
+  public UninitializedMessageException(final List<String> missingFields) {
+    super(buildDescription(missingFields));
+    this.missingFields = missingFields;
+  }
+
+  private final List<String> missingFields;
+
+  /**
+   * Get a list of human-readable names of required fields missing from this
+   * message.  Each name is a full path to a field, e.g. "foo.bar[5].baz".
+   * Returns null if the lite runtime was used, since it lacks the ability to
+   * find missing fields.
+   */
+  public List<String> getMissingFields() {
+    return Collections.unmodifiableList(missingFields);
+  }
+
+  /**
+   * Converts this exception to an {@link InvalidProtocolBufferException}.
+   * When a parsed message is missing required fields, this should be thrown
+   * instead of {@code UninitializedMessageException}.
+   */
+  public InvalidProtocolBufferException asInvalidProtocolBufferException() {
+    return new InvalidProtocolBufferException(getMessage());
+  }
+
+  /** Construct the description string for this exception. */
+  private static String buildDescription(final List<String> missingFields) {
+    final StringBuilder description =
+      new StringBuilder("Message missing required fields: ");
+    boolean first = true;
+    for (final String field : missingFields) {
+      if (first) {
+        first = false;
+      } else {
+        description.append(", ");
+      }
+      description.append(field);
+    }
+    return description.toString();
+  }
+}
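
[Annotation] The wrapper types in this commit have no required fields, so their build() never throws UninitializedMessageException; the sketch below instead exercises the public constructor and the conversion defined above directly (the field names in the list are illustrative):

    import java.util.Arrays;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.UninitializedMessageException;

    class UninitializedDemo {
      public static void main(String[] args) {
        // Simulate what a generated build() does when required fields are unset.
        UninitializedMessageException e =
            new UninitializedMessageException(Arrays.asList("name", "address.city"));

        // The description string is assembled by buildDescription() above.
        assert e.getMessage().equals(
            "Message missing required fields: name, address.city");

        // Parsing code is expected to re-frame this programming error as a
        // checked parse failure instead of letting the RuntimeException escape.
        InvalidProtocolBufferException parseError = e.asInvalidProtocolBufferException();
        System.out.println(parseError.getMessage());
      }
    }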

http://git-wip-us.apache.org/repos/asf/hbase/blob/401aa064/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java
new file mode 100644
index 0000000..47b9e12
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java
@@ -0,0 +1,1037 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.LimitedInputStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * {@code UnknownFieldSet} is used to keep track of fields which were seen when
+ * parsing a protocol message but whose field numbers or types are unrecognized.
+ * This most frequently occurs when new fields are added to a message type
+ * and then messages containing those fields are read by old software that was
+ * compiled before the new types were added.
+ *
+ * <p>Every {@link Message} contains an {@code UnknownFieldSet} (and every
+ * {@link Message.Builder} contains an {@link Builder}).
+ *
+ * <p>Most users will never need to use this class.
+ *
+ * @author kenton@google.com Kenton Varda
+ */
+public final class UnknownFieldSet implements MessageLite {
+
+  private UnknownFieldSet() {}
+
+  /** Create a new {@link Builder}. */
+  public static Builder newBuilder() {
+    return Builder.create();
+  }
+
+  /**
+   * Create a new {@link Builder} and initialize it to be a copy
+   * of {@code copyFrom}.
+   */
+  public static Builder newBuilder(final UnknownFieldSet copyFrom) {
+    return newBuilder().mergeFrom(copyFrom);
+  }
+
+  /** Get an empty {@code UnknownFieldSet}. */
+  public static UnknownFieldSet getDefaultInstance() {
+    return defaultInstance;
+  }
+  @Override
+  public UnknownFieldSet getDefaultInstanceForType() {
+    return defaultInstance;
+  }
+  private static final UnknownFieldSet defaultInstance =
+    new UnknownFieldSet(Collections.<Integer, Field>emptyMap());
+
+  /**
+   * Construct an {@code UnknownFieldSet} around the given map.  The map is
+   * expected to be immutable.
+   */
+  private UnknownFieldSet(final Map<Integer, Field> fields) {
+    this.fields = fields;
+  }
+  private Map<Integer, Field> fields;
+
+
+  @Override
+  public boolean equals(final Object other) {
+    if (this == other) {
+      return true;
+    }
+    return (other instanceof UnknownFieldSet) &&
+           fields.equals(((UnknownFieldSet) other).fields);
+  }
+
+  @Override
+  public int hashCode() {
+    return fields.hashCode();
+  }
+
+  /** Get a map of fields in the set by number. */
+  public Map<Integer, Field> asMap() {
+    return fields;
+  }
+
+  /** Check if the given field number is present in the set. */
+  public boolean hasField(final int number) {
+    return fields.containsKey(number);
+  }
+
+  /**
+   * Get a field by number.  Returns an empty field if not present.  Never
+   * returns {@code null}.
+   */
+  public Field getField(final int number) {
+    final Field result = fields.get(number);
+    return (result == null) ? Field.getDefaultInstance() : result;
+  }
+
+  /** Serializes the set and writes it to {@code output}. */
+  @Override
+  public void writeTo(final CodedOutputStream output) throws IOException {
+    for (final Map.Entry<Integer, Field> entry : fields.entrySet()) {
+      Field field = entry.getValue();
+      field.writeTo(entry.getKey(), output);
+    }
+  }
+
+  /**
+   * Converts the set to a string in protocol buffer text format. This is
+   * just a trivial wrapper around
+   * {@link TextFormat#printToString(UnknownFieldSet)}.
+   */
+  @Override
+  public String toString() {
+    return TextFormat.printToString(this);
+  }
+
+  /**
+   * Serializes the message to a {@code ByteString} and returns it. This is
+   * just a trivial wrapper around {@link #writeTo(CodedOutputStream)}.
+   */
+  @Override
+  public ByteString toByteString() {
+    try {
+      final ByteString.CodedBuilder out =
+        ByteString.newCodedBuilder(getSerializedSize());
+      writeTo(out.getCodedOutput());
+      return out.build();
+    } catch (final IOException e) {
+      throw new RuntimeException(
+        "Serializing to a ByteString threw an IOException (should " +
+        "never happen).", e);
+    }
+  }
+
+  /**
+   * Serializes the message to a {@code byte} array and returns it.  This is
+   * just a trivial wrapper around {@link #writeTo(CodedOutputStream)}.
+   */
+  @Override
+  public byte[] toByteArray() {
+    try {
+      final byte[] result = new byte[getSerializedSize()];
+      final CodedOutputStream output = CodedOutputStream.newInstance(result);
+      writeTo(output);
+      output.checkNoSpaceLeft();
+      return result;
+    } catch (final IOException e) {
+      throw new RuntimeException(
+        "Serializing to a byte array threw an IOException " +
+        "(should never happen).", e);
+    }
+  }
+
+  /**
+   * Serializes the message and writes it to {@code output}.  This is just a
+   * trivial wrapper around {@link #writeTo(CodedOutputStream)}.
+   */
+  @Override
+  public void writeTo(final OutputStream output) throws IOException {
+    final CodedOutputStream codedOutput = CodedOutputStream.newInstance(output);
+    writeTo(codedOutput);
+    codedOutput.flush();
+  }
+
+  @Override
+  public void writeDelimitedTo(OutputStream output) throws IOException {
+    final CodedOutputStream codedOutput = CodedOutputStream.newInstance(output);
+    codedOutput.writeRawVarint32(getSerializedSize());
+    writeTo(codedOutput);
+    codedOutput.flush();
+  }
+
+  /** Get the number of bytes required to encode this set. */
+  @Override
+  public int getSerializedSize() {
+    int result = 0;
+    for (final Map.Entry<Integer, Field> entry : fields.entrySet()) {
+      result += entry.getValue().getSerializedSize(entry.getKey());
+    }
+    return result;
+  }
+
+  /**
+   * Serializes the set and writes it to {@code output} using
+   * {@code MessageSet} wire format.
+   */
+  public void writeAsMessageSetTo(final CodedOutputStream output)
+      throws IOException {
+    for (final Map.Entry<Integer, Field> entry : fields.entrySet()) {
+      entry.getValue().writeAsMessageSetExtensionTo(
+        entry.getKey(), output);
+    }
+  }
+
+  /**
+   * Get the number of bytes required to encode this set using
+   * {@code MessageSet} wire format.
+   */
+  public int getSerializedSizeAsMessageSet() {
+    int result = 0;
+    for (final Map.Entry<Integer, Field> entry : fields.entrySet()) {
+      result += entry.getValue().getSerializedSizeAsMessageSetExtension(
+        entry.getKey());
+    }
+    return result;
+  }
+
+  @Override
+  public boolean isInitialized() {
+    // UnknownFieldSets do not have required fields, so they are always
+    // initialized.
+    return true;
+  }
+
+  /** Parse an {@code UnknownFieldSet} from the given input stream. */
+  public static UnknownFieldSet parseFrom(final CodedInputStream input)
+                                          throws IOException {
+    return newBuilder().mergeFrom(input).build();
+  }
+
+  /** Parse {@code data} as an {@code UnknownFieldSet} and return it. */
+  public static UnknownFieldSet parseFrom(final ByteString data)
+      throws InvalidProtocolBufferException {
+    return newBuilder().mergeFrom(data).build();
+  }
+
+  /** Parse {@code data} as an {@code UnknownFieldSet} and return it. */
+  public static UnknownFieldSet parseFrom(final byte[] data)
+      throws InvalidProtocolBufferException {
+    return newBuilder().mergeFrom(data).build();
+  }
+
+  /** Parse an {@code UnknownFieldSet} from {@code input} and return it. */
+  public static UnknownFieldSet parseFrom(final InputStream input)
+                                          throws IOException {
+    return newBuilder().mergeFrom(input).build();
+  }
+
+  @Override
+  public Builder newBuilderForType() {
+    return newBuilder();
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return newBuilder().mergeFrom(this);
+  }
+
+  /**
+   * Builder for {@link UnknownFieldSet}s.
+   *
+   * <p>Note that this class maintains {@link Field.Builder}s for all fields
+   * in the set.  Thus, adding one element to an existing {@link Field} does not
+   * require making a copy.  This is important for efficient parsing of
+   * unknown repeated fields.  However, it implies that {@link Field}s cannot
+   * be constructed independently, nor can two {@link UnknownFieldSet}s share
+   * the same {@code Field} object.
+   *
+   * <p>Use {@link UnknownFieldSet#newBuilder()} to construct a {@code Builder}.
+   */
+  public static final class Builder implements MessageLite.Builder {
+    // This constructor should never be called directly (except from 'create').
+    private Builder() {}
+
+    private Map<Integer, Field> fields;
+
+    // Optimization:  We keep around a builder for the last field that was
+    //   modified so that we can efficiently add to it multiple times in a
+    //   row (important when parsing an unknown repeated field).
+    private int lastFieldNumber;
+    private Field.Builder lastField;
+
+    private static Builder create() {
+      Builder builder = new Builder();
+      builder.reinitialize();
+      return builder;
+    }
+
+    /**
+     * Get a field builder for the given field number which includes any
+     * values that already exist.
+     */
+    private Field.Builder getFieldBuilder(final int number) {
+      if (lastField != null) {
+        if (number == lastFieldNumber) {
+          return lastField;
+        }
+        // Note:  addField() will reset lastField and lastFieldNumber.
+        addField(lastFieldNumber, lastField.build());
+      }
+      if (number == 0) {
+        return null;
+      } else {
+        final Field existing = fields.get(number);
+        lastFieldNumber = number;
+        lastField = Field.newBuilder();
+        if (existing != null) {
+          lastField.mergeFrom(existing);
+        }
+        return lastField;
+      }
+    }
+
+    /**
+     * Build the {@link UnknownFieldSet} and return it.
+     *
+     * <p>Once {@code build()} has been called, the {@code Builder} will no
+     * longer be usable.  Calling any method after {@code build()} will result
+     * in undefined behavior and can cause a {@code NullPointerException} to be
+     * thrown.
+     */
+    @Override
+    public UnknownFieldSet build() {
+      getFieldBuilder(0);  // Force lastField to be built.
+      final UnknownFieldSet result;
+      if (fields.isEmpty()) {
+        result = getDefaultInstance();
+      } else {
+        result = new UnknownFieldSet(Collections.unmodifiableMap(fields));
+      }
+      fields = null;
+      return result;
+    }
+
+    @Override
+    public UnknownFieldSet buildPartial() {
+      // No required fields, so this is the same as build().
+      return build();
+    }
+
+    @Override
+    public Builder clone() {
+      getFieldBuilder(0);  // Force lastField to be built.
+      return UnknownFieldSet.newBuilder().mergeFrom(
+          new UnknownFieldSet(fields));
+    }
+
+    @Override
+    public UnknownFieldSet getDefaultInstanceForType() {
+      return UnknownFieldSet.getDefaultInstance();
+    }
+
+    private void reinitialize() {
+      fields = Collections.emptyMap();
+      lastFieldNumber = 0;
+      lastField = null;
+    }
+
+    /** Reset the builder to an empty set. */
+    @Override
+    public Builder clear() {
+      reinitialize();
+      return this;
+    }
+
+    /** Clear fields from the set with a given field number. */
+    public Builder clearField(final int number) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      if (lastField != null && lastFieldNumber == number) {
+        // Discard the cached builder for this field number.
+        lastField = null;
+        lastFieldNumber = 0;
+      }
+      if (fields.containsKey(number)) {
+        fields.remove(number);
+      }
+      return this;
+    }
+
+    /**
+     * Merge the fields from {@code other} into this set.  If a field number
+     * exists in both sets, {@code other}'s values for that field will be
+     * appended to the values in this set.
+     */
+    public Builder mergeFrom(final UnknownFieldSet other) {
+      if (other != getDefaultInstance()) {
+        for (final Map.Entry<Integer, Field> entry : other.fields.entrySet()) {
+          mergeField(entry.getKey(), entry.getValue());
+        }
+      }
+      return this;
+    }
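+
+    // Append-semantics sketch: if sets `a` and `b` both carry values under
+    // the same field number, the merge keeps both, `a`'s values first:
+    //
+    //   UnknownFieldSet merged = UnknownFieldSet.newBuilder()
+    //       .mergeFrom(a)
+    //       .mergeFrom(b)
+    //       .build();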
+
+    /**
+     * Add a field to the {@code UnknownFieldSet}.  If a field with the same
+     * number already exists, the two are merged.
+     */
+    public Builder mergeField(final int number, final Field field) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      if (hasField(number)) {
+        getFieldBuilder(number).mergeFrom(field);
+      } else {
+        // Optimization:  We could call getFieldBuilder(number).mergeFrom(field)
+        // in this case, but that would create a copy of the Field object.
+        // We'd rather reuse the one passed to us, so call addField() instead.
+        addField(number, field);
+      }
+      return this;
+    }
+
+    /**
+     * Convenience method for merging a new field containing a single varint
+     * value.  This is used in particular when an unknown enum value is
+     * encountered.
+     */
+    public Builder mergeVarintField(final int number, final int value) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      getFieldBuilder(number).addVarint(value);
+      return this;
+    }
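+
+    // Sketch of the intended call site: when a parser hits an enum number it
+    // does not recognize, it typically preserves it with something like the
+    // following (field number 4 is illustrative):
+    //
+    //   builder.mergeVarintField(4, unrecognizedEnumNumber);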
+
+    /**
+     * Convenience method for merging a length-delimited field.
+     *
+     * <p>For use by generated code only.
+     */
+    public Builder mergeLengthDelimitedField(
+        final int number, final ByteString value) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      getFieldBuilder(number).addLengthDelimited(value);
+      return this;
+    }
+
+    /** Check if the given field number is present in the set. */
+    public boolean hasField(final int number) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      return number == lastFieldNumber || fields.containsKey(number);
+    }
+
+    /**
+     * Add a field to the {@code UnknownFieldSet}.  If a field with the same
+     * number already exists, it is replaced.
+     */
+    public Builder addField(final int number, final Field field) {
+      if (number == 0) {
+        throw new IllegalArgumentException("Zero is not a valid field number.");
+      }
+      if (lastField != null && lastFieldNumber == number) {
+        // Discard the cached builder for this field number.
+        lastField = null;
+        lastFieldNumber = 0;
+      }
+      if (fields.isEmpty()) {
+        fields = new TreeMap<Integer,Field>();
+      }
+      fields.put(number, field);
+      return this;
+    }
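+
+    // Sketch of the addField()/mergeField() contrast (field number 3 and the
+    // Field values f1..f3 are illustrative):
+    //
+    //   builder.addField(3, f1);    // set now holds exactly f1 under 3
+    //   builder.addField(3, f2);    // f1 is replaced by f2
+    //   builder.mergeField(3, f3);  // f2 and f3 are merged under 3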
+
+    /**
+     * Get all present {@code Field}s as an immutable {@code Map}.  If more
+     * fields are added, the changes may or may not be reflected in this map.
+     */
+    public Map<Integer, Field> asMap() {
+      getFieldBuilder(0);  // Force lastField to be built.
+      return Collections.unmodifiableMap(fields);
+    }
+
+    /**
+     * Parse an entire message from {@code input} and merge its fields into
+     * this set.
+     */
+    @Override
+    public Builder mergeFrom(final CodedInputStream input) throws IOException {
+      while (true) {
+        final int tag = input.readTag();
+        if (tag == 0 || !mergeFieldFrom(tag, input)) {
+          break;
+        }
+      }
+      return this;
+    }
+
+    /**
+     * Parse a single field from {@code input} and merge it into this set.
+     * @param tag The field's tag (field number and wire type), which was already parsed.
+     * @return {@code false} if the tag is an end group tag.
+     */
+    public boolean mergeFieldFrom(final int tag, final CodedInputStream input)
+                                  throws IOException {
+      final int number = WireFormat.getTagFieldNumber(tag);
+      switch (WireFormat.getTagWireType(tag)) {
+        case WireFormat.WIRETYPE_VARINT:
+          getFieldBuilder(number).addVarint(input.readInt64());
+          return true;
+        case WireFormat.WIRETYPE_FIXED64:
+          getFieldBuilder(number).addFixed64(input.readFixed64());
+          return true;
+        case WireFormat.WIRETYPE_LENGTH_DELIMITED:
+          getFieldBuilder(number).addLengthDelimited(input.readBytes());
+          return true;
+        case WireFormat.WIRETYPE_START_GROUP:
+          final Builder subBuilder = newBuilder();
+          input.readGroup(number, subBuilder,
+                          ExtensionRegistry.getEmptyRegistry());
+          getFieldBuilder(number).addGroup(subBuilder.build());
+          return true;
+        case WireFormat.WIRETYPE_END_GROUP:
+          return false;
+        case WireFormat.WIRETYPE_FIXED32:
+          getFieldBuilder(number).addFixed32(input.readFixed32());
+          return true;
+        default:
+          throw InvalidProtocolBufferException.invalidWireType();
+      }
+    }
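+
+    // Note the contract with mergeFrom(CodedInputStream) above: only an
+    // end-group tag yields false, and that false return is what terminates
+    // the scan of a nested group begun under WIRETYPE_START_GROUP.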
+
+    /**
+     * Parse {@code data} as an {@code UnknownFieldSet} and merge it with the
+     * set being built.  This is just a small wrapper around
+     * {@link #mergeFrom(CodedInputStream)}.
+     */
+    @Override
+    public Builder mergeFrom(final ByteString data) throws InvalidProtocolBufferException {
+      try {
+        final CodedInputStream input = data.newCodedInput();
+        mergeFrom(input);
+        input.checkLastTagWas(0);
+        return this;
+      } catch (final InvalidProtocolBufferException e) {
+        throw e;
+      } catch (final IOException e) {
+        throw new RuntimeException(
+          "Reading from a ByteString threw an IOException (should " +
+          "never happen).", e);
+      }
+    }
+
+    /**
+     * Parse {@code data} as an {@code UnknownFieldSet} and merge it with the
+     * set being built.  This is just a small wrapper around
+     * {@link #mergeFrom(CodedInputStream)}.
+     */
+    @Override
+    public Builder mergeFrom(final byte[] data) throws InvalidProtocolBufferException {
+      try {
+        final CodedInputStream input = CodedInputStream.newInstance(data);
+        mergeFrom(input);
+        input.checkLastTagWas(0);
+        return this;
+      } catch (final InvalidProtocolBufferException e) {
+        throw e;
+      } catch (final IOException e) {
+        throw new RuntimeException(
+          "Reading from a byte array threw an IOException (should " +
+          "never happen).", e);
+      }
+    }
+
+    /**
+     * Parse an {@code UnknownFieldSet} from {@code input} and merge it with the
+     * set being built.  This is just a small wrapper around
+     * {@link #mergeFrom(CodedInputStream)}.
+     */
+    @Override
+    public Builder mergeFrom(final InputStream input) throws IOException {
+      final CodedInputStream codedInput = CodedInputStream.newInstance(input);
+      mergeFrom(codedInput);
+      codedInput.checkLastTagWas(0);
+      return this;
+    }
+
+    @Override
+    public boolean mergeDelimitedFrom(InputStream input) throws IOException {
+      final int firstByte = input.read();
+      if (firstByte == -1) {
+        return false;
+      }
+      final int size = CodedInputStream.readRawVarint32(firstByte, input);
+      final InputStream limitedInput = new LimitedInputStream(input, size);
+      mergeFrom(limitedInput);
+      return true;
+    }
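+
+    // Framing sketch for the delimited format handled above: a varint length
+    // prefix followed by exactly that many message bytes; a -1 on the first
+    // read is a clean end-of-stream, reported as false.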
+
+    @Override
+    public boolean mergeDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
+        throws IOException {
+      // UnknownFieldSet has no extensions.
+      return mergeDelimitedFrom(input);
+    }
+
+    @Override
+    public Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
+        throws IOException {
+      // UnknownFieldSet has no extensions.
+      return mergeFrom(input);
+    }
+
+    @Override
+    public Builder mergeFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
+        throws InvalidProtocolBufferException {
+      // UnknownFieldSet has no extensions.
+      return mergeFrom(data);
+    }
+
+    @Override
+    public Builder mergeFrom(byte[] data, int off, int len) throws InvalidProtocolBufferException {
+      try {
+        final CodedInputStream input =
+            CodedInputStream.newInstance(data, off, len);
+        mergeFrom(input);
+        input.checkLastTagWas(0);
+        return this;
+      } catch (InvalidProtocolBufferException e) {
+        throw e;
+      } catch (IOException e) {
+        throw new RuntimeException(
+          "Reading from a byte array threw an IOException (should " +
+          "never happen).", e);
+      }
+    }
+
+    @Override
+    public Builder mergeFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
+        throws InvalidProtocolBufferException {
+      // UnknownFieldSet has no extensions.
+      return mergeFrom(data);
+    }
+
+    @Override
+    public Builder mergeFrom(byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry)
+        throws InvalidProtocolBufferException {
+      // UnknownFieldSet has no extensions.
+      return mergeFrom(data, off, len);
+    }
+
+    @Override
+    public Builder mergeFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
+        throws IOException {
+      // UnknownFieldSet has no extensions.
+      return mergeFrom(input);
+    }
+
+    @Override
+    public Builder mergeFrom(MessageLite m) {
+      if (m instanceof UnknownFieldSet) {
+        return mergeFrom((UnknownFieldSet) m);
+      }
+      throw new IllegalArgumentException(
+          "mergeFrom(MessageLite) can only merge messages of the same type.");
+    }
+
+    @Override
+    public boolean isInitialized() {
+      // UnknownFieldSets do not have required fields, so they are always
+      // initialized.
+      return true;
+    }
+  }
+
+  /**
+   * Represents a single field in an {@code UnknownFieldSet}.
+   *
+   * <p>A {@code Field} consists of five lists of values.  The lists correspond
+   * to the five "wire types" used in the protocol buffer binary format.
+   * The wire type of each field can be determined from the encoded form alone,
+   * without knowing the field's declared type.  So, we are able to parse
+   * unknown values at least this far and separate them.  Normally, only one
+   * of the five lists will contain any values, since it is impossible to
+   * define a valid message type that declares two different types for the
+   * same field number.  However, the code is designed to allow for the case
+   * where the same unknown field number is encountered using multiple different
+   * wire types.
+   *
+   * <p>{@code Field} is an immutable class.  To construct one, you must use a
+   * {@link Builder}.
+   *
+   * @see UnknownFieldSet
+   */
+  public static final class Field {
+    private Field() {}
+
+    /** Construct a new {@link Builder}. */
+    public static Builder newBuilder() {
+      return Builder.create();
+    }
+
+    /**
+     * Construct a new {@link Builder} and initialize it to a copy of
+     * {@code copyFrom}.
+     */
+    public static Builder newBuilder(final Field copyFrom) {
+      return newBuilder().mergeFrom(copyFrom);
+    }
+
+    /** Get an empty {@code Field}. */
+    public static Field getDefaultInstance() {
+      return fieldDefaultInstance;
+    }
+    private static final Field fieldDefaultInstance = newBuilder().build();
+
+    /** Get the list of varint values for this field. */
+    public List<Long> getVarintList()               { return varint;          }
+
+    /** Get the list of fixed32 values for this field. */
+    public List<Integer> getFixed32List()           { return fixed32;         }
+
+    /** Get the list of fixed64 values for this field. */
+    public List<Long> getFixed64List()              { return fixed64;         }
+
+    /** Get the list of length-delimited values for this field. */
+    public List<ByteString> getLengthDelimitedList() { return lengthDelimited; }
+
+    /**
+     * Get the list of embedded group values for this field.  These are
+     * represented using {@link UnknownFieldSet}s rather than {@link Message}s
+     * since the group's type is presumably unknown.
+     */
+    public List<UnknownFieldSet> getGroupList()      { return group;           }
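+
+    // Sketch: assembling a Field by hand (the values are illustrative):
+    //
+    //   Field f = Field.newBuilder()
+    //       .addVarint(1)
+    //       .addLengthDelimited(ByteString.copyFromUtf8("x"))
+    //       .build();
+    //   f.getVarintList();            // => [1]
+    //   f.getFixed32List().isEmpty(); // => true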
+
+    @Override
+    public boolean equals(final Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (!(other instanceof Field)) {
+        return false;
+      }
+      return Arrays.equals(getIdentityArray(),
+          ((Field) other).getIdentityArray());
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(getIdentityArray());
+    }
+
+    /**
+     * Returns the array of objects to be used to uniquely identify this
+     * {@link Field} instance.
+     */
+    private Object[] getIdentityArray() {
+      return new Object[] {
+          varint,
+          fixed32,
+          fixed64,
+          lengthDelimited,
+          group};
+    }
+
+    /**
+     * Serializes the field, including field number, and writes it to
+     * {@code output}.
+     */
+    public void writeTo(final int fieldNumber, final CodedOutputStream output)
+                        throws IOException {
+      for (final long value : varint) {
+        output.writeUInt64(fieldNumber, value);
+      }
+      for (final int value : fixed32) {
+        output.writeFixed32(fieldNumber, value);
+      }
+      for (final long value : fixed64) {
+        output.writeFixed64(fieldNumber, value);
+      }
+      for (final ByteString value : lengthDelimited) {
+        output.writeBytes(fieldNumber, value);
+      }
+      for (final UnknownFieldSet value : group) {
+        output.writeGroup(fieldNumber, value);
+      }
+    }
+
+    /**
+     * Get the number of bytes required to encode this field, including field
+     * number.
+     */
+    public int getSerializedSize(final int fieldNumber) {
+      int result = 0;
+      for (final long value : varint) {
+        result += CodedOutputStream.computeUInt64Size(fieldNumber, value);
+      }
+      for (final int value : fixed32) {
+        result += CodedOutputStream.computeFixed32Size(fieldNumber, value);
+      }
+      for (final long value : fixed64) {
+        result += CodedOutputStream.computeFixed64Size(fieldNumber, value);
+      }
+      for (final ByteString value : lengthDelimited) {
+        result += CodedOutputStream.computeBytesSize(fieldNumber, value);
+      }
+      for (final UnknownFieldSet value : group) {
+        result += CodedOutputStream.computeGroupSize(fieldNumber, value);
+      }
+      return result;
+    }
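+
+    // Size invariant worth noting: for any field number n, the bytes emitted
+    // by writeTo(n, output) total exactly getSerializedSize(n); callers rely
+    // on this to size buffers before serializing.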
+
+    /**
+     * Serializes the field, including field number, and writes it to
+     * {@code output}, using {@code MessageSet} wire format.
+     */
+    public void writeAsMessageSetExtensionTo(
+        final int fieldNumber,
+        final CodedOutputStream output)
+        throws IOException {
+      for (final ByteString value : lengthDelimited) {
+        output.writeRawMessageSetExtension(fieldNumber, value);
+      }
+    }
+
+    /**
+     * Get the number of bytes required to encode this field, including field
+     * number, using {@code MessageSet} wire format.
+     */
+    public int getSerializedSizeAsMessageSetExtension(final int fieldNumber) {
+      int result = 0;
+      for (final ByteString value : lengthDelimited) {
+        result += CodedOutputStream.computeRawMessageSetExtensionSize(
+          fieldNumber, value);
+      }
+      return result;
+    }
+
+    private List<Long> varint;
+    private List<Integer> fixed32;
+    private List<Long> fixed64;
+    private List<ByteString> lengthDelimited;
+    private List<UnknownFieldSet> group;
+
+    /**
+     * Used to build a {@link Field} within an {@link UnknownFieldSet}.
+     *
+     * <p>Use {@link Field#newBuilder()} to construct a {@code Builder}.
+     */
+    public static final class Builder {
+      // This constructor should never be called directly (except from 'create').
+      private Builder() {}
+
+      private static Builder create() {
+        Builder builder = new Builder();
+        builder.result = new Field();
+        return builder;
+      }
+
+      private Field result;
+
+      /**
+       * Build the field.  After {@code build()} has been called, the
+       * {@code Builder} is no longer usable.  Calling any other method will
+       * result in undefined behavior and can cause a
+       * {@code NullPointerException} to be thrown.
+       */
+      public Field build() {
+        if (result.varint == null) {
+          result.varint = Collections.emptyList();
+        } else {
+          result.varint = Collections.unmodifiableList(result.varint);
+        }
+        if (result.fixed32 == null) {
+          result.fixed32 = Collections.emptyList();
+        } else {
+          result.fixed32 = Collections.unmodifiableList(result.fixed32);
+        }
+        if (result.fixed64 == null) {
+          result.fixed64 = Collections.emptyList();
+        } else {
+          result.fixed64 = Collections.unmodifiableList(result.fixed64);
+        }
+        if (result.lengthDelimited == null) {
+          result.lengthDelimited = Collections.emptyList();
+        } else {
+          result.lengthDelimited =
+            Collections.unmodifiableList(result.lengthDelimited);
+        }
+        if (result.group == null) {
+          result.group = Collections.emptyList();
+        } else {
+          result.group = Collections.unmodifiableList(result.group);
+        }
+
+        final Field returnMe = result;
+        result = null;
+        return returnMe;
+      }
+
+      /** Discard the field's contents. */
+      public Builder clear() {
+        result = new Field();
+        return this;
+      }
+
+      /**
+       * Merge the values in {@code other} into this field.  For each list
+       * of values, {@code other}'s values are appended to the ones in this
+       * field.
+       */
+      public Builder mergeFrom(final Field other) {
+        if (!other.varint.isEmpty()) {
+          if (result.varint == null) {
+            result.varint = new ArrayList<Long>();
+          }
+          result.varint.addAll(other.varint);
+        }
+        if (!other.fixed32.isEmpty()) {
+          if (result.fixed32 == null) {
+            result.fixed32 = new ArrayList<Integer>();
+          }
+          result.fixed32.addAll(other.fixed32);
+        }
+        if (!other.fixed64.isEmpty()) {
+          if (result.fixed64 == null) {
+            result.fixed64 = new ArrayList<Long>();
+          }
+          result.fixed64.addAll(other.fixed64);
+        }
+        if (!other.lengthDelimited.isEmpty()) {
+          if (result.lengthDelimited == null) {
+            result.lengthDelimited = new ArrayList<ByteString>();
+          }
+          result.lengthDelimited.addAll(other.lengthDelimited);
+        }
+        if (!other.group.isEmpty()) {
+          if (result.group == null) {
+            result.group = new ArrayList<UnknownFieldSet>();
+          }
+          result.group.addAll(other.group);
+        }
+        return this;
+      }
+
+      /** Add a varint value. */
+      public Builder addVarint(final long value) {
+        if (result.varint == null) {
+          result.varint = new ArrayList<Long>();
+        }
+        result.varint.add(value);
+        return this;
+      }
+
+      /** Add a fixed32 value. */
+      public Builder addFixed32(final int value) {
+        if (result.fixed32 == null) {
+          result.fixed32 = new ArrayList<Integer>();
+        }
+        result.fixed32.add(value);
+        return this;
+      }
+
+      /** Add a fixed64 value. */
+      public Builder addFixed64(final long value) {
+        if (result.fixed64 == null) {
+          result.fixed64 = new ArrayList<Long>();
+        }
+        result.fixed64.add(value);
+        return this;
+      }
+
+      /** Add a length-delimited value. */
+      public Builder addLengthDelimited(final ByteString value) {
+        if (result.lengthDelimited == null) {
+          result.lengthDelimited = new ArrayList<ByteString>();
+        }
+        result.lengthDelimited.add(value);
+        return this;
+      }
+
+      /** Add an embedded group. */
+      public Builder addGroup(final UnknownFieldSet value) {
+        if (result.group == null) {
+          result.group = new ArrayList<UnknownFieldSet>();
+        }
+        result.group.add(value);
+        return this;
+      }
+    }
+  }
+
+  /**
+   * Parser to implement MessageLite interface.
+   */
+  public static final class Parser extends AbstractParser<UnknownFieldSet> {
+    @Override
+    public UnknownFieldSet parsePartialFrom(
+        CodedInputStream input, ExtensionRegistryLite extensionRegistry)
+        throws InvalidProtocolBufferException {
+      Builder builder = newBuilder();
+      try {
+        builder.mergeFrom(input);
+      } catch (InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(builder.buildPartial());
+      } catch (IOException e) {
+        throw new InvalidProtocolBufferException(e.getMessage())
+            .setUnfinishedMessage(builder.buildPartial());
+      }
+      return builder.buildPartial();
+    }
+  }
+
+  private static final Parser PARSER = new Parser();
+  @Override
+  public final Parser getParserForType() {
+    return PARSER;
+  }
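+
+  // Sketch of reaching the parser through the public MessageLite surface,
+  // assuming `input` is a CodedInputStream (the empty registry is fine since
+  // UnknownFieldSet has no extensions):
+  //
+  //   UnknownFieldSet set = UnknownFieldSet.getDefaultInstance()
+  //       .getParserForType()
+  //       .parsePartialFrom(input, ExtensionRegistryLite.getEmptyRegistry());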
+}