Posted to commits@hbase.apache.org by st...@apache.org on 2016/10/04 05:16:32 UTC

[19/51] [partial] hbase git commit: HBASE-15638 Shade protobuf Which includes

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java
new file mode 100644
index 0000000..88effb2
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java
@@ -0,0 +1,273 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.DoubleList;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.RandomAccess;
+
+/**
+ * An implementation of {@link DoubleList} on top of a primitive array.
+ *
+ * @author dweis@google.com (Daniel Weis)
+ */
+final class DoubleArrayList
+    extends AbstractProtobufList<Double>
+    implements DoubleList, RandomAccess {
+
+  private static final DoubleArrayList EMPTY_LIST = new DoubleArrayList();
+  static {
+    EMPTY_LIST.makeImmutable();
+  }
+
+  public static DoubleArrayList emptyList() {
+    return EMPTY_LIST;
+  }
+
+  /**
+   * The backing store for the list.
+   */
+  private double[] array;
+
+  /**
+   * The size of the list distinct from the length of the array. That is, it is the number of
+   * elements set in the list.
+   */
+  private int size;
+
+  /**
+   * Constructs a new mutable {@code DoubleArrayList} with default capacity.
+   */
+  DoubleArrayList() {
+    this(new double[DEFAULT_CAPACITY], 0);
+  }
+
+  /**
+   * Constructs a new mutable {@code DoubleArrayList}
+   * containing the same elements as {@code other}.
+   */
+  private DoubleArrayList(double[] other, int size) {
+    array = other;
+    this.size = size;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (!(o instanceof DoubleArrayList)) {
+      return super.equals(o);
+    }
+    DoubleArrayList other = (DoubleArrayList) o;
+    if (size != other.size) {
+      return false;
+    }
+
+    final double[] arr = other.array;
+    for (int i = 0; i < size; i++) {
+      if (array[i] != arr[i]) {
+        return false;
+      }
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = 1;
+    for (int i = 0; i < size; i++) {
+      long bits = Double.doubleToLongBits(array[i]);
+      result = (31 * result) + Internal.hashLong(bits);
+    }
+    return result;
+  }
+
+  @Override
+  public DoubleList mutableCopyWithCapacity(int capacity) {
+    if (capacity < size) {
+      throw new IllegalArgumentException();
+    }
+    return new DoubleArrayList(Arrays.copyOf(array, capacity), size);
+  }
+
+  @Override
+  public Double get(int index) {
+    return getDouble(index);
+  }
+
+  @Override
+  public double getDouble(int index) {
+    ensureIndexInRange(index);
+    return array[index];
+  }
+
+  @Override
+  public int size() {
+    return size;
+  }
+
+  @Override
+  public Double set(int index, Double element) {
+    return setDouble(index, element);
+  }
+
+  @Override
+  public double setDouble(int index, double element) {
+    ensureIsMutable();
+    ensureIndexInRange(index);
+    double previousValue = array[index];
+    array[index] = element;
+    return previousValue;
+  }
+
+  @Override
+  public void add(int index, Double element) {
+    addDouble(index, element);
+  }
+
+  /**
+   * Like {@link #add(Double)} but more efficient in that it doesn't box the element.
+   */
+  @Override
+  public void addDouble(double element) {
+    addDouble(size, element);
+  }
+
+  /**
+   * Like {@link #add(int, Double)} but more efficient in that it doesn't box the element.
+   */
+  private void addDouble(int index, double element) {
+    ensureIsMutable();
+    if (index < 0 || index > size) {
+      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
+    }
+
+    if (size < array.length) {
+      // Shift everything over to make room
+      System.arraycopy(array, index, array, index + 1, size - index);
+    } else {
+      // Resize to 1.5x the size
+      int length = ((size * 3) / 2) + 1;
+      double[] newArray = new double[length];
+
+      // Copy the first part directly
+      System.arraycopy(array, 0, newArray, 0, index);
+
+      // Copy the rest shifted over by one to make room
+      System.arraycopy(array, index, newArray, index + 1, size - index);
+      array = newArray;
+    }
+
+    array[index] = element;
+    size++;
+    modCount++;
+  }
+
+  @Override
+  public boolean addAll(Collection<? extends Double> collection) {
+    ensureIsMutable();
+
+    if (collection == null) {
+      throw new NullPointerException();
+    }
+
+    // We specialize when adding another DoubleArrayList to avoid boxing elements.
+    if (!(collection instanceof DoubleArrayList)) {
+      return super.addAll(collection);
+    }
+
+    DoubleArrayList list = (DoubleArrayList) collection;
+    if (list.size == 0) {
+      return false;
+    }
+
+    int overflow = Integer.MAX_VALUE - size;
+    if (overflow < list.size) {
+      // We can't actually represent a list this large.
+      throw new OutOfMemoryError();
+    }
+
+    int newSize = size + list.size;
+    if (newSize > array.length) {
+      array = Arrays.copyOf(array, newSize);
+    }
+
+    System.arraycopy(list.array, 0, array, size, list.size);
+    size = newSize;
+    modCount++;
+    return true;
+  }
+
+  @Override
+  public boolean remove(Object o) {
+    ensureIsMutable();
+    for (int i = 0; i < size; i++) {
+      if (o.equals(array[i])) {
+        // Shift size - i - 1 trailing elements; copying size - i would read
+        // one past the last element when the backing array is exactly full.
+        System.arraycopy(array, i + 1, array, i, size - i - 1);
+        size--;
+        modCount++;
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public Double remove(int index) {
+    ensureIsMutable();
+    ensureIndexInRange(index);
+    double value = array[index];
+    // Shift size - index - 1 trailing elements (see remove(Object) above).
+    System.arraycopy(array, index + 1, array, index, size - index - 1);
+    size--;
+    modCount++;
+    return value;
+  }
+
+  /**
+   * Ensures that the provided {@code index} is within the range of {@code [0, size)}. Throws an
+   * {@link IndexOutOfBoundsException} if it is not.
+   *
+   * @param index the index to verify is in range
+   */
+  private void ensureIndexInRange(int index) {
+    if (index < 0 || index >= size) {
+      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
+    }
+  }
+
+  private String makeOutOfBoundsExceptionMessage(int index) {
+    return "Index:" + index + ", Size:" + size;
+  }
+}
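
For orientation, a minimal same-package sketch of the unboxed path above.
DoubleArrayList is package-private, DEFAULT_CAPACITY comes from
AbstractProtobufList (10), and the demo class name and values are
illustrative only, not part of this commit:

    package org.apache.hadoop.hbase.shaded.com.google.protobuf;

    final class DoubleArrayListDemo {
      public static void main(String[] args) {
        DoubleArrayList list = new DoubleArrayList();
        for (int i = 0; i < 11; i++) {
          // addDouble skips the boxing that add(Double) incurs; the 11th
          // add resizes the full default array: (10 * 3) / 2 + 1 = 16.
          list.addDouble(i * 0.5);
        }
        double third = list.getDouble(2);  // 1.0, read without boxing
        list.makeImmutable();              // mutators now throw
        System.out.println(list.size() + " elements, third = " + third);
      }
    }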

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java
new file mode 100644
index 0000000..2450436
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java
@@ -0,0 +1,454 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * Wrapper message for `double`.
+ * The JSON representation for `DoubleValue` is JSON number.
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.DoubleValue}
+ */
+public  final class DoubleValue extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.DoubleValue)
+    DoubleValueOrBuilder {
+  // Use DoubleValue.newBuilder() to construct.
+  private DoubleValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private DoubleValue() {
+    value_ = 0D;
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private DoubleValue(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 9: {
+
+            value_ = input.readDouble();
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
+  }
+
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.Builder.class);
+  }
+
+  public static final int VALUE_FIELD_NUMBER = 1;
+  private double value_;
+  /**
+   * <pre>
+   * The double value.
+   * </pre>
+   *
+   * <code>optional double value = 1;</code>
+   */
+  public double getValue() {
+    return value_;
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    if (value_ != 0D) {
+      output.writeDouble(1, value_);
+    }
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    if (value_ != 0D) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+        .computeDoubleSize(1, value_);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) obj;
+
+    boolean result = true;
+    result = result && (
+        java.lang.Double.doubleToLongBits(getValue())
+        == java.lang.Double.doubleToLongBits(
+            other.getValue()));
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptorForType().hashCode();
+    hash = (37 * hash) + VALUE_FIELD_NUMBER;
+    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+        java.lang.Double.doubleToLongBits(getValue()));
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * Wrapper message for `double`.
+   * The JSON representation for `DoubleValue` is JSON number.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.DoubleValue}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.DoubleValue)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValueOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      value_ = 0D;
+
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue(this);
+      result.value_ = value_;
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.getDefaultInstance()) return this;
+      if (other.getValue() != 0D) {
+        setValue(other.getValue());
+      }
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private double value_ ;
+    /**
+     * <pre>
+     * The double value.
+     * </pre>
+     *
+     * <code>optional double value = 1;</code>
+     */
+    public double getValue() {
+      return value_;
+    }
+    /**
+     * <pre>
+     * The double value.
+     * </pre>
+     *
+     * <code>optional double value = 1;</code>
+     */
+    public Builder setValue(double value) {
+      
+      value_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * The double value.
+     * </pre>
+     *
+     * <code>optional double value = 1;</code>
+     */
+    public Builder clearValue() {
+      
+      value_ = 0D;
+      onChanged();
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.DoubleValue)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.DoubleValue)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DoubleValue>() {
+    public DoubleValue parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new DoubleValue(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
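
A minimal round-trip sketch for the wrapper above, assuming only the shaded
runtime on the classpath; the class name is illustrative. The wire tag 0x09
is field 1 with the 64-bit wire type, which is why the parsing constructor
above switches on case 9, and 0D is the proto3 default that writeTo omits:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

    public class DoubleValueRoundTrip {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        DoubleValue original = DoubleValue.newBuilder().setValue(2.5).build();
        byte[] wire = original.toByteArray();  // tag 0x09, then an 8-byte LE double
        DoubleValue parsed = DoubleValue.parseFrom(wire);
        System.out.println(parsed.getValue()); // 2.5
      }
    }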

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValueOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValueOrBuilder.java
new file mode 100644
index 0000000..5bd1a3f
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValueOrBuilder.java
@@ -0,0 +1,18 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface DoubleValueOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.DoubleValue)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * The double value.
+   * </pre>
+   *
+   * <code>optional double value = 1;</code>
+   */
+  double getValue();
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java
new file mode 100644
index 0000000..a675f7b
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java
@@ -0,0 +1,618 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/duration.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * A Duration represents a signed, fixed-length span of time represented
+ * as a count of seconds and fractions of seconds at nanosecond
+ * resolution. It is independent of any calendar and concepts like "day"
+ * or "month". It is related to Timestamp in that the difference between
+ * two Timestamp values is a Duration and it can be added or subtracted
+ * from a Timestamp. Range is approximately +-10,000 years.
+ * Example 1: Compute Duration from two Timestamps in pseudo code.
+ *     Timestamp start = ...;
+ *     Timestamp end = ...;
+ *     Duration duration = ...;
+ *     duration.seconds = end.seconds - start.seconds;
+ *     duration.nanos = end.nanos - start.nanos;
+ *     if (duration.seconds &lt; 0 &amp;&amp; duration.nanos &gt; 0) {
+ *       duration.seconds += 1;
+ *       duration.nanos -= 1000000000;
+ *     } else if (duration.seconds &gt; 0 &amp;&amp; duration.nanos &lt; 0) {
+ *       duration.seconds -= 1;
+ *       duration.nanos += 1000000000;
+ *     }
+ * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+ *     Timestamp start = ...;
+ *     Duration duration = ...;
+ *     Timestamp end = ...;
+ *     end.seconds = start.seconds + duration.seconds;
+ *     end.nanos = start.nanos + duration.nanos;
+ *     if (end.nanos &lt; 0) {
+ *       end.seconds -= 1;
+ *       end.nanos += 1000000000;
+ *     } else if (end.nanos &gt;= 1000000000) {
+ *       end.seconds += 1;
+ *       end.nanos -= 1000000000;
+ *     }
+ * Example 3: Compute Duration from datetime.timedelta in Python.
+ *     td = datetime.timedelta(days=3, minutes=10)
+ *     duration = Duration()
+ *     duration.FromTimedelta(td)
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.Duration}
+ */
+public  final class Duration extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.Duration)
+    DurationOrBuilder {
+  // Use Duration.newBuilder() to construct.
+  private Duration(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private Duration() {
+    seconds_ = 0L;
+    nanos_ = 0;
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private Duration(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 8: {
+
+            seconds_ = input.readInt64();
+            break;
+          }
+          case 16: {
+
+            nanos_ = input.readInt32();
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
+  }
+
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.Builder.class);
+  }
+
+  public static final int SECONDS_FIELD_NUMBER = 1;
+  private long seconds_;
+  /**
+   * <pre>
+   * Signed seconds of the span of time. Must be from -315,576,000,000
+   * to +315,576,000,000 inclusive.
+   * </pre>
+   *
+   * <code>optional int64 seconds = 1;</code>
+   */
+  public long getSeconds() {
+    return seconds_;
+  }
+
+  public static final int NANOS_FIELD_NUMBER = 2;
+  private int nanos_;
+  /**
+   * <pre>
+   * Signed fractions of a second at nanosecond resolution of the span
+   * of time. Durations less than one second are represented with a 0
+   * `seconds` field and a positive or negative `nanos` field. For durations
+   * of one second or more, a non-zero value for the `nanos` field must be
+   * of the same sign as the `seconds` field. Must be from -999,999,999
+   * to +999,999,999 inclusive.
+   * </pre>
+   *
+   * <code>optional int32 nanos = 2;</code>
+   */
+  public int getNanos() {
+    return nanos_;
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    if (seconds_ != 0L) {
+      output.writeInt64(1, seconds_);
+    }
+    if (nanos_ != 0) {
+      output.writeInt32(2, nanos_);
+    }
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    if (seconds_ != 0L) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+        .computeInt64Size(1, seconds_);
+    }
+    if (nanos_ != 0) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+        .computeInt32Size(2, nanos_);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) obj;
+
+    boolean result = true;
+    result = result && (getSeconds()
+        == other.getSeconds());
+    result = result && (getNanos()
+        == other.getNanos());
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptorForType().hashCode();
+    hash = (37 * hash) + SECONDS_FIELD_NUMBER;
+    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
+        getSeconds());
+    hash = (37 * hash) + NANOS_FIELD_NUMBER;
+    hash = (53 * hash) + getNanos();
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * A Duration represents a signed, fixed-length span of time represented
+   * as a count of seconds and fractions of seconds at nanosecond
+   * resolution. It is independent of any calendar and concepts like "day"
+   * or "month". It is related to Timestamp in that the difference between
+   * two Timestamp values is a Duration and it can be added or subtracted
+   * from a Timestamp. Range is approximately +-10,000 years.
+   * Example 1: Compute Duration from two Timestamps in pseudo code.
+   *     Timestamp start = ...;
+   *     Timestamp end = ...;
+   *     Duration duration = ...;
+   *     duration.seconds = end.seconds - start.seconds;
+   *     duration.nanos = end.nanos - start.nanos;
+   *     if (duration.seconds &lt; 0 &amp;&amp; duration.nanos &gt; 0) {
+   *       duration.seconds += 1;
+   *       duration.nanos -= 1000000000;
+   *     } else if (duration.seconds &gt; 0 &amp;&amp; duration.nanos &lt; 0) {
+   *       duration.seconds -= 1;
+   *       duration.nanos += 1000000000;
+   *     }
+   * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+   *     Timestamp start = ...;
+   *     Duration duration = ...;
+   *     Timestamp end = ...;
+   *     end.seconds = start.seconds + duration.seconds;
+   *     end.nanos = start.nanos + duration.nanos;
+   *     if (end.nanos &lt; 0) {
+   *       end.seconds -= 1;
+   *       end.nanos += 1000000000;
+   *     } else if (end.nanos &gt;= 1000000000) {
+   *       end.seconds += 1;
+   *       end.nanos -= 1000000000;
+   *     }
+   * Example 3: Compute Duration from datetime.timedelta in Python.
+   *     td = datetime.timedelta(days=3, minutes=10)
+   *     duration = Duration()
+   *     duration.FromTimedelta(td)
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.Duration}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.Duration)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      seconds_ = 0L;
+
+      nanos_ = 0;
+
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration(this);
+      result.seconds_ = seconds_;
+      result.nanos_ = nanos_;
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.getDefaultInstance()) return this;
+      if (other.getSeconds() != 0L) {
+        setSeconds(other.getSeconds());
+      }
+      if (other.getNanos() != 0) {
+        setNanos(other.getNanos());
+      }
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private long seconds_ ;
+    /**
+     * <pre>
+     * Signed seconds of the span of time. Must be from -315,576,000,000
+     * to +315,576,000,000 inclusive.
+     * </pre>
+     *
+     * <code>optional int64 seconds = 1;</code>
+     */
+    public long getSeconds() {
+      return seconds_;
+    }
+    /**
+     * <pre>
+     * Signed seconds of the span of time. Must be from -315,576,000,000
+     * to +315,576,000,000 inclusive.
+     * </pre>
+     *
+     * <code>optional int64 seconds = 1;</code>
+     */
+    public Builder setSeconds(long value) {
+      
+      seconds_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * Signed seconds of the span of time. Must be from -315,576,000,000
+     * to +315,576,000,000 inclusive.
+     * </pre>
+     *
+     * <code>optional int64 seconds = 1;</code>
+     */
+    public Builder clearSeconds() {
+      
+      seconds_ = 0L;
+      onChanged();
+      return this;
+    }
+
+    private int nanos_ ;
+    /**
+     * <pre>
+     * Signed fractions of a second at nanosecond resolution of the span
+     * of time. Durations less than one second are represented with a 0
+     * `seconds` field and a positive or negative `nanos` field. For durations
+     * of one second or more, a non-zero value for the `nanos` field must be
+     * of the same sign as the `seconds` field. Must be from -999,999,999
+     * to +999,999,999 inclusive.
+     * </pre>
+     *
+     * <code>optional int32 nanos = 2;</code>
+     */
+    public int getNanos() {
+      return nanos_;
+    }
+    /**
+     * <pre>
+     * Signed fractions of a second at nanosecond resolution of the span
+     * of time. Durations less than one second are represented with a 0
+     * `seconds` field and a positive or negative `nanos` field. For durations
+     * of one second or more, a non-zero value for the `nanos` field must be
+     * of the same sign as the `seconds` field. Must be from -999,999,999
+     * to +999,999,999 inclusive.
+     * </pre>
+     *
+     * <code>optional int32 nanos = 2;</code>
+     */
+    public Builder setNanos(int value) {
+      
+      nanos_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * Signed fractions of a second at nanosecond resolution of the span
+     * of time. Durations less than one second are represented with a 0
+     * `seconds` field and a positive or negative `nanos` field. For durations
+     * of one second or more, a non-zero value for the `nanos` field must be
+     * of the same sign as the `seconds` field. Must be from -999,999,999
+     * to +999,999,999 inclusive.
+     * </pre>
+     *
+     * <code>optional int32 nanos = 2;</code>
+     */
+    public Builder clearNanos() {
+      
+      nanos_ = 0;
+      onChanged();
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.Duration)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.Duration)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Duration>() {
+    public Duration parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new Duration(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
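
A sketch of Example 1 from the javadoc above, written against the shaded
builder; the helper name and the (seconds, nanos) inputs are stand-ins:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration;

    public class DurationMath {
      // Difference of two instants given as (seconds, nanos) pairs.
      static Duration between(long startSec, int startNanos, long endSec, int endNanos) {
        long seconds = endSec - startSec;
        int nanos = endNanos - startNanos;
        // Normalize so seconds and nanos carry the same sign, as the
        // nanos field comment requires.
        if (seconds < 0 && nanos > 0) {
          seconds += 1;
          nanos -= 1000000000;
        } else if (seconds > 0 && nanos < 0) {
          seconds -= 1;
          nanos += 1000000000;
        }
        return Duration.newBuilder().setSeconds(seconds).setNanos(nanos).build();
      }

      public static void main(String[] args) {
        Duration d = between(5L, 900000000, 7L, 100000000);
        System.out.println(d.getSeconds() + "s " + d.getNanos() + "ns"); // 1s 200000000ns
      }
    }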

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java
new file mode 100644
index 0000000..245f6b9
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java
@@ -0,0 +1,33 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/duration.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface DurationOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.Duration)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * Signed seconds of the span of time. Must be from -315,576,000,000
+   * to +315,576,000,000 inclusive.
+   * </pre>
+   *
+   * <code>optional int64 seconds = 1;</code>
+   */
+  long getSeconds();
+
+  /**
+   * <pre>
+   * Signed fractions of a second at nanosecond resolution of the span
+   * of time. Durations less than one second are represented with a 0
+   * `seconds` field and a positive or negative `nanos` field. For durations
+   * of one second or more, a non-zero value for the `nanos` field must be
+   * of the same sign as the `seconds` field. Must be from -999,999,999
+   * to +999,999,999 inclusive.
+   * </pre>
+   *
+   * <code>optional int32 nanos = 2;</code>
+   */
+  int getNanos();
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationProto.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationProto.java
new file mode 100644
index 0000000..ee379ea
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationProto.java
@@ -0,0 +1,59 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/duration.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public final class DurationProto {
+  private DurationProto() {}
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_google_protobuf_Duration_descriptor;
+  static final 
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_protobuf_Duration_fieldAccessorTable;
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\036google/protobuf/duration.proto\022\017google" +
+      ".protobuf\"*\n\010Duration\022\017\n\007seconds\030\001 \001(\003\022\r" +
+      "\n\005nanos\030\002 \001(\005B|\n\023com.google.protobufB\rDu" +
+      "rationProtoP\001Z*github.com/golang/protobu" +
+      "f/ptypes/duration\370\001\001\242\002\003GPB\252\002\036Google.Prot" +
+      "obuf.WellKnownTypesb\006proto3"
+    };
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
+            descriptor = root;
+            return null;
+          }
+        };
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+    internal_static_google_protobuf_Duration_descriptor =
+      getDescriptor().getMessageTypes().get(0);
+    internal_static_google_protobuf_Duration_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_google_protobuf_Duration_descriptor,
+        new java.lang.String[] { "Seconds", "Nanos", });
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
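
The static initializer above rebuilds the duration.proto FileDescriptor from
its serialized form and wires up the Duration message descriptor and its
field accessor table. A minimal sketch of reading the descriptor back at
runtime; class names are abbreviated, with everything living under the
shaded org.apache.hadoop.hbase.shaded.com.google.protobuf package:

    // duration.proto declares exactly one message, so index 0 is Duration.
    Descriptors.FileDescriptor file = DurationProto.getDescriptor();
    Descriptors.Descriptor duration = file.getMessageTypes().get(0);
    // The fully qualified name as declared in the .proto source.
    assert "google.protobuf.Duration".equals(duration.getFullName());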

http://git-wip-us.apache.org/repos/asf/hbase/blob/95c1dc93/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java
new file mode 100644
index 0000000..110f2a9
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java
@@ -0,0 +1,684 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An implementation of {@link Message} that can represent arbitrary types,
+ * given a {@link Descriptors.Descriptor}.
+ *
+ * @author kenton@google.com Kenton Varda
+ */
+public final class DynamicMessage extends AbstractMessage {
+  private final Descriptor type;
+  private final FieldSet<FieldDescriptor> fields;
+  private final FieldDescriptor[] oneofCases;
+  private final UnknownFieldSet unknownFields;
+  private int memoizedSize = -1;
+
+  /**
+   * Construct a {@code DynamicMessage} using the given {@code FieldSet}.
+   * {@code oneofCases} stores the {@code FieldDescriptor} for each oneof to
+   * indicate which field is set. The caller must ensure the array is
+   * immutable.
+   *
+   * This constructor is package private and is used by
+   * {@code DynamicMutableMessage} to convert a mutable message to an
+   * immutable message.
+   */
+  DynamicMessage(Descriptor type, FieldSet<FieldDescriptor> fields,
+                 FieldDescriptor[] oneofCases,
+                 UnknownFieldSet unknownFields) {
+    this.type = type;
+    this.fields = fields;
+    this.oneofCases = oneofCases;
+    this.unknownFields = unknownFields;
+  }
+
+  /**
+   * Get a {@code DynamicMessage} representing the default instance of the
+   * given type.
+   */
+  public static DynamicMessage getDefaultInstance(Descriptor type) {
+    int oneofDeclCount = type.toProto().getOneofDeclCount();
+    FieldDescriptor[] oneofCases = new FieldDescriptor[oneofDeclCount];
+    return new DynamicMessage(type, FieldSet.<FieldDescriptor>emptySet(),
+                              oneofCases,
+                              UnknownFieldSet.getDefaultInstance());
+  }
+
+
+  /** Parse a message of the given type from the given input stream. */
+  public static DynamicMessage parseFrom(Descriptor type,
+                                         CodedInputStream input)
+                                         throws IOException {
+    return newBuilder(type).mergeFrom(input).buildParsed();
+  }
+
+  /** Parse a message of the given type from the given input stream. */
+  public static DynamicMessage parseFrom(
+      Descriptor type,
+      CodedInputStream input,
+      ExtensionRegistry extensionRegistry)
+      throws IOException {
+    return newBuilder(type).mergeFrom(input, extensionRegistry).buildParsed();
+  }
+
+  /** Parse {@code data} as a message of the given type and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, ByteString data)
+                                         throws InvalidProtocolBufferException {
+    return newBuilder(type).mergeFrom(data).buildParsed();
+  }
+
+  /** Parse {@code data} as a message of the given type and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, ByteString data,
+                                         ExtensionRegistry extensionRegistry)
+                                         throws InvalidProtocolBufferException {
+    return newBuilder(type).mergeFrom(data, extensionRegistry).buildParsed();
+  }
+
+  /** Parse {@code data} as a message of the given type and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, byte[] data)
+                                         throws InvalidProtocolBufferException {
+    return newBuilder(type).mergeFrom(data).buildParsed();
+  }
+
+  /** Parse {@code data} as a message of the given type and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, byte[] data,
+                                         ExtensionRegistry extensionRegistry)
+                                         throws InvalidProtocolBufferException {
+    return newBuilder(type).mergeFrom(data, extensionRegistry).buildParsed();
+  }
+
+  /** Parse a message of the given type from {@code input} and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, InputStream input)
+                                         throws IOException {
+    return newBuilder(type).mergeFrom(input).buildParsed();
+  }
+
+  /** Parse a message of the given type from {@code input} and return it. */
+  public static DynamicMessage parseFrom(Descriptor type, InputStream input,
+                                         ExtensionRegistry extensionRegistry)
+                                         throws IOException {
+    return newBuilder(type).mergeFrom(input, extensionRegistry).buildParsed();
+  }
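+
+  // Note: every parseFrom() overload above funnels through
+  // Builder.buildParsed(), so a missing required field surfaces as an
+  // InvalidProtocolBufferException rather than an
+  // UninitializedMessageException.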
+
+  /** Construct a {@link Message.Builder} for the given type. */
+  public static Builder newBuilder(Descriptor type) {
+    return new Builder(type);
+  }
+
+  /**
+   * Construct a {@link Message.Builder} for a message of the same type as
+   * {@code prototype}, and initialize it with {@code prototype}'s contents.
+   */
+  public static Builder newBuilder(Message prototype) {
+    return new Builder(prototype.getDescriptorForType()).mergeFrom(prototype);
+  }
+
+  // -----------------------------------------------------------------
+  // Implementation of Message interface.
+
+  @Override
+  public Descriptor getDescriptorForType() {
+    return type;
+  }
+
+  @Override
+  public DynamicMessage getDefaultInstanceForType() {
+    return getDefaultInstance(type);
+  }
+
+  @Override
+  public Map<FieldDescriptor, Object> getAllFields() {
+    return fields.getAllFields();
+  }
+
+  @Override
+  public boolean hasOneof(OneofDescriptor oneof) {
+    verifyOneofContainingType(oneof);
+    return oneofCases[oneof.getIndex()] != null;
+  }
+
+  @Override
+  public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
+    verifyOneofContainingType(oneof);
+    return oneofCases[oneof.getIndex()];
+  }
+
+  @Override
+  public boolean hasField(FieldDescriptor field) {
+    verifyContainingType(field);
+    return fields.hasField(field);
+  }
+
+  @Override
+  public Object getField(FieldDescriptor field) {
+    verifyContainingType(field);
+    Object result = fields.getField(field);
+    if (result == null) {
+      if (field.isRepeated()) {
+        result = Collections.emptyList();
+      } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
+        result = getDefaultInstance(field.getMessageType());
+      } else {
+        result = field.getDefaultValue();
+      }
+    }
+    return result;
+  }
+
+  @Override
+  public int getRepeatedFieldCount(FieldDescriptor field) {
+    verifyContainingType(field);
+    return fields.getRepeatedFieldCount(field);
+  }
+
+  @Override
+  public Object getRepeatedField(FieldDescriptor field, int index) {
+    verifyContainingType(field);
+    return fields.getRepeatedField(field, index);
+  }
+
+  @Override
+  public UnknownFieldSet getUnknownFields() {
+    return unknownFields;
+  }
+
+  static boolean isInitialized(Descriptor type,
+                               FieldSet<FieldDescriptor> fields) {
+    // Check that all required fields are present.
+    for (final FieldDescriptor field : type.getFields()) {
+      if (field.isRequired()) {
+        if (!fields.hasField(field)) {
+          return false;
+        }
+      }
+    }
+
+    // Check that embedded messages are initialized.
+    return fields.isInitialized();
+  }
+
+  @Override
+  public boolean isInitialized() {
+    return isInitialized(type, fields);
+  }
+
+  @Override
+  public void writeTo(CodedOutputStream output) throws IOException {
+    if (type.getOptions().getMessageSetWireFormat()) {
+      fields.writeMessageSetTo(output);
+      unknownFields.writeAsMessageSetTo(output);
+    } else {
+      fields.writeTo(output);
+      unknownFields.writeTo(output);
+    }
+  }
+
+  @Override
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    if (type.getOptions().getMessageSetWireFormat()) {
+      size = fields.getMessageSetSerializedSize();
+      size += unknownFields.getSerializedSizeAsMessageSet();
+    } else {
+      size = fields.getSerializedSize();
+      size += unknownFields.getSerializedSize();
+    }
+
+    memoizedSize = size;
+    return size;
+  }
+
+  @Override
+  public Builder newBuilderForType() {
+    return new Builder(type);
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return newBuilderForType().mergeFrom(this);
+  }
+
+  @Override
+  public Parser<DynamicMessage> getParserForType() {
+    return new AbstractParser<DynamicMessage>() {
+      @Override
+      public DynamicMessage parsePartialFrom(
+          CodedInputStream input, ExtensionRegistryLite extensionRegistry)
+          throws InvalidProtocolBufferException {
+        Builder builder = newBuilder(type);
+        try {
+          builder.mergeFrom(input, extensionRegistry);
+        } catch (InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(builder.buildPartial());
+        } catch (IOException e) {
+          throw new InvalidProtocolBufferException(e.getMessage())
+              .setUnfinishedMessage(builder.buildPartial());
+        }
+        return builder.buildPartial();
+      }
+    };
+  }
+
+  /** Verifies that the field is a field of this message. */
+  private void verifyContainingType(FieldDescriptor field) {
+    if (field.getContainingType() != type) {
+      throw new IllegalArgumentException(
+        "FieldDescriptor does not match message type.");
+    }
+  }
+
+  /** Verifies that the oneof belongs to this message. */
+  private void verifyOneofContainingType(OneofDescriptor oneof) {
+    if (oneof.getContainingType() != type) {
+      throw new IllegalArgumentException(
+        "OneofDescriptor does not match message type.");
+    }
+  }
+
+  // =================================================================
+
+  /**
+   * Builder for {@link DynamicMessage}s.
+   */
+  public static final class Builder extends AbstractMessage.Builder<Builder> {
+    private final Descriptor type;
+    private FieldSet<FieldDescriptor> fields;
+    private final FieldDescriptor[] oneofCases;
+    private UnknownFieldSet unknownFields;
+
+    /** Construct a {@code Builder} for the given type. */
+    private Builder(Descriptor type) {
+      this.type = type;
+      this.fields = FieldSet.newFieldSet();
+      this.unknownFields = UnknownFieldSet.getDefaultInstance();
+      this.oneofCases = new FieldDescriptor[type.toProto().getOneofDeclCount()];
+    }
+
+    // ---------------------------------------------------------------
+    // Implementation of Message.Builder interface.
+
+    @Override
+    public Builder clear() {
+      if (fields.isImmutable()) {
+        fields = FieldSet.newFieldSet();
+      } else {
+        fields.clear();
+      }
+      unknownFields = UnknownFieldSet.getDefaultInstance();
+      return this;
+    }
+
+    @Override
+    public Builder mergeFrom(Message other) {
+      if (other instanceof DynamicMessage) {
+        // This should be somewhat faster than calling super.mergeFrom().
+        DynamicMessage otherDynamicMessage = (DynamicMessage) other;
+        if (otherDynamicMessage.type != type) {
+          throw new IllegalArgumentException(
+            "mergeFrom(Message) can only merge messages of the same type.");
+        }
+        ensureIsMutable();
+        fields.mergeFrom(otherDynamicMessage.fields);
+        mergeUnknownFields(otherDynamicMessage.unknownFields);
+        for (int i = 0; i < oneofCases.length; i++) {
+          if (oneofCases[i] == null) {
+            oneofCases[i] = otherDynamicMessage.oneofCases[i];
+          } else {
+            if ((otherDynamicMessage.oneofCases[i] != null)
+                && (oneofCases[i] != otherDynamicMessage.oneofCases[i])) {
+              fields.clearField(oneofCases[i]);
+              oneofCases[i] = otherDynamicMessage.oneofCases[i];
+            }
+          }
+        }
+        return this;
+      } else {
+        return super.mergeFrom(other);
+      }
+    }
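+
+    // Oneof merge semantics, per the loop above: when both messages have a
+    // case set for the same oneof and the cases differ, the other message
+    // wins; this builder's previously set field is cleared first.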
+
+    @Override
+    public DynamicMessage build() {
+      if (!isInitialized()) {
+        throw newUninitializedMessageException(
+          new DynamicMessage(type, fields,
+              java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields));
+      }
+      return buildPartial();
+    }
+
+    /**
+     * Helper for DynamicMessage.parseFrom() methods to call.  Throws
+     * {@link InvalidProtocolBufferException} instead of
+     * {@link UninitializedMessageException}.
+     */
+    private DynamicMessage buildParsed() throws InvalidProtocolBufferException {
+      if (!isInitialized()) {
+        throw newUninitializedMessageException(
+          new DynamicMessage(type, fields,
+              java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields))
+          .asInvalidProtocolBufferException();
+      }
+      return buildPartial();
+    }
+
+    @Override
+    public DynamicMessage buildPartial() {
+      fields.makeImmutable();
+      DynamicMessage result =
+        new DynamicMessage(type, fields,
+            java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields);
+      return result;
+    }
+
+    @Override
+    public Builder clone() {
+      Builder result = new Builder(type);
+      result.fields.mergeFrom(fields);
+      result.mergeUnknownFields(unknownFields);
+      System.arraycopy(oneofCases, 0, result.oneofCases, 0, oneofCases.length);
+      return result;
+    }
+
+    @Override
+    public boolean isInitialized() {
+      return DynamicMessage.isInitialized(type, fields);
+    }
+
+    @Override
+    public Descriptor getDescriptorForType() {
+      return type;
+    }
+
+    @Override
+    public DynamicMessage getDefaultInstanceForType() {
+      return getDefaultInstance(type);
+    }
+
+    @Override
+    public Map<FieldDescriptor, Object> getAllFields() {
+      return fields.getAllFields();
+    }
+
+    @Override
+    public Builder newBuilderForField(FieldDescriptor field) {
+      verifyContainingType(field);
+
+      if (field.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
+        throw new IllegalArgumentException(
+          "newBuilderForField is only valid for fields with message type.");
+      }
+
+      return new Builder(field.getMessageType());
+    }
+
+    @Override
+    public boolean hasOneof(OneofDescriptor oneof) {
+      verifyOneofContainingType(oneof);
+      return oneofCases[oneof.getIndex()] != null;
+    }
+
+    @Override
+    public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
+      verifyOneofContainingType(oneof);
+      return oneofCases[oneof.getIndex()];
+    }
+
+    @Override
+    public Builder clearOneof(OneofDescriptor oneof) {
+      verifyOneofContainingType(oneof);
+      FieldDescriptor field = oneofCases[oneof.getIndex()];
+      if (field != null) {
+        clearField(field);
+      }
+      return this;
+    }
+
+    @Override
+    public boolean hasField(FieldDescriptor field) {
+      verifyContainingType(field);
+      return fields.hasField(field);
+    }
+
+    @Override
+    public Object getField(FieldDescriptor field) {
+      verifyContainingType(field);
+      Object result = fields.getField(field);
+      if (result == null) {
+        if (field.isRepeated()) {
+          result = Collections.emptyList();
+        } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
+          result = getDefaultInstance(field.getMessageType());
+        } else {
+          result = field.getDefaultValue();
+        }
+      }
+      return result;
+    }
+
+    @Override
+    public Builder setField(FieldDescriptor field, Object value) {
+      verifyContainingType(field);
+      ensureIsMutable();
+      // TODO(xiaofeng): This check should really live in FieldSet.setField(),
+      // where all other such checks are done. However, FieldSet.setField()
+      // currently permits Integer values for enum fields, probably because of
+      // some internal features we support. Figure that out and move this
+      // check to a more appropriate place.
+      if (field.getType() == FieldDescriptor.Type.ENUM) {
+        ensureEnumValueDescriptor(field, value);
+      }
+      OneofDescriptor oneofDescriptor = field.getContainingOneof();
+      if (oneofDescriptor != null) {
+        int index = oneofDescriptor.getIndex();
+        FieldDescriptor oldField = oneofCases[index];
+        if ((oldField != null) && (oldField != field)) {
+          fields.clearField(oldField);
+        }
+        oneofCases[index] = field;
+      } else if (field.getFile().getSyntax() == Descriptors.FileDescriptor.Syntax.PROTO3) {
+        if (!field.isRepeated()
+            && field.getJavaType() != FieldDescriptor.JavaType.MESSAGE
+            && value.equals(field.getDefaultValue())) {
+          // In proto3, setting a field to its default value is equivalent to clearing the field.
+          fields.clearField(field);
+          return this;
+        }
+      }
+      fields.setField(field, value);
+      return this;
+    }
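+
+    // Example of the proto3 branch above: for a proto3 scalar such as
+    // Duration's "seconds" field, setField(secondsField, 0L) clears the
+    // field rather than storing the default, so hasField(secondsField)
+    // stays false (secondsField being an illustrative FieldDescriptor).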
+
+    @Override
+    public Builder clearField(FieldDescriptor field) {
+      verifyContainingType(field);
+      ensureIsMutable();
+      OneofDescriptor oneofDescriptor = field.getContainingOneof();
+      if (oneofDescriptor != null) {
+        int index = oneofDescriptor.getIndex();
+        if (oneofCases[index] == field) {
+          oneofCases[index] = null;
+        }
+      }
+      fields.clearField(field);
+      return this;
+    }
+
+    @Override
+    public int getRepeatedFieldCount(FieldDescriptor field) {
+      verifyContainingType(field);
+      return fields.getRepeatedFieldCount(field);
+    }
+
+    @Override
+    public Object getRepeatedField(FieldDescriptor field, int index) {
+      verifyContainingType(field);
+      return fields.getRepeatedField(field, index);
+    }
+
+    @Override
+    public Builder setRepeatedField(FieldDescriptor field, int index, Object value) {
+      verifyContainingType(field);
+      ensureIsMutable();
+      fields.setRepeatedField(field, index, value);
+      return this;
+    }
+
+    @Override
+    public Builder addRepeatedField(FieldDescriptor field, Object value) {
+      verifyContainingType(field);
+      ensureIsMutable();
+      fields.addRepeatedField(field, value);
+      return this;
+    }
+
+    @Override
+    public UnknownFieldSet getUnknownFields() {
+      return unknownFields;
+    }
+
+    @Override
+    public Builder setUnknownFields(UnknownFieldSet unknownFields) {
+      if (getDescriptorForType().getFile().getSyntax()
+          == Descriptors.FileDescriptor.Syntax.PROTO3) {
+        // Proto3 discards unknown fields.
+        return this;
+      }
+      this.unknownFields = unknownFields;
+      return this;
+    }
+
+    @Override
+    public Builder mergeUnknownFields(UnknownFieldSet unknownFields) {
+      if (getDescriptorForType().getFile().getSyntax()
+          == Descriptors.FileDescriptor.Syntax.PROTO3) {
+        // Proto3 discards unknown fields.
+        return this;
+      }
+      this.unknownFields =
+        UnknownFieldSet.newBuilder(this.unknownFields)
+                       .mergeFrom(unknownFields)
+                       .build();
+      return this;
+    }
+
+    /** Verifies that the field is a field of this message. */
+    private void verifyContainingType(FieldDescriptor field) {
+      if (field.getContainingType() != type) {
+        throw new IllegalArgumentException(
+          "FieldDescriptor does not match message type.");
+      }
+    }
+
+    /** Verifies that the oneof belongs to this message. */
+    private void verifyOneofContainingType(OneofDescriptor oneof) {
+      if (oneof.getContainingType() != type) {
+        throw new IllegalArgumentException(
+          "OneofDescriptor does not match message type.");
+      }
+    }
+
+    /** Verifies that the value is an EnumValueDescriptor and matches the enum type. */
+    private void ensureSingularEnumValueDescriptor(
+        FieldDescriptor field, Object value) {
+      if (value == null) {
+        throw new NullPointerException();
+      }
+      if (!(value instanceof EnumValueDescriptor)) {
+        throw new IllegalArgumentException(
+          "DynamicMessage should use EnumValueDescriptor to set Enum Value.");
+      }
+      // TODO(xiaofeng): Re-enable this check after Orgstore is fixed to not
+      // set incorrect EnumValueDescriptors.
+      // EnumDescriptor fieldType = field.getEnumType();
+      // EnumDescriptor fieldValueType = ((EnumValueDescriptor) value).getType();
+      // if (fieldType != fieldValueType) {
+      //  throw new IllegalArgumentException(String.format(
+      //      "EnumDescriptor %s of field doesn't match EnumDescriptor %s of field value",
+      //      fieldType.getFullName(), fieldValueType.getFullName()));
+      // }
+    }
+
+    /** Verifies the value for an enum field. */
+    private void ensureEnumValueDescriptor(
+        FieldDescriptor field, Object value) {
+      if (field.isRepeated()) {
+        for (Object item : (List<?>) value) {
+          ensureSingularEnumValueDescriptor(field, item);
+        }
+      } else {
+        ensureSingularEnumValueDescriptor(field, value);
+      }
+    }
+
+    private void ensureIsMutable() {
+      if (fields.isImmutable()) {
+        fields = fields.clone();
+      }
+    }
+
+    @Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder getFieldBuilder(FieldDescriptor field) {
+      // TODO(xiangl): needs an implementation for dynamic messages
+      throw new UnsupportedOperationException(
+        "getFieldBuilder() called on a dynamic message type.");
+    }
+
+    @Override
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder getRepeatedFieldBuilder(FieldDescriptor field,
+        int index) {
+      throw new UnsupportedOperationException(
+        "getRepeatedFieldBuilder() called on a dynamic message type.");
+    }
+  }
+}
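
Taken together, the static parseFrom() helpers and the Builder above let a
caller construct and round-trip a message from nothing but its Descriptor. A
minimal sketch using the Duration descriptor wired up in DurationProto
earlier (class names abbreviated as in the previous sketch; exception
handling elided):

    Descriptors.Descriptor type =
        DurationProto.getDescriptor().getMessageTypes().get(0);
    Descriptors.FieldDescriptor seconds = type.findFieldByName("seconds");

    // setField() runs the oneof bookkeeping and the proto3 default-value
    // check shown above.
    DynamicMessage msg = DynamicMessage.newBuilder(type)
        .setField(seconds, 42L)
        .build();

    // Round-trip through the wire format via the static parseFrom() helper.
    DynamicMessage reparsed = DynamicMessage.parseFrom(type, msg.toByteString());
    assert reparsed.getField(seconds).equals(42L);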