Posted to commits@hbase.apache.org by st...@apache.org on 2017/07/07 05:08:07 UTC

[15/51] [partial] hbase git commit: Revert "HBASE-17056 Remove checked in PB generated files Selective add of dependency on" Build is unstable and has interesting issues around CLASSPATH. Revert for now.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java
new file mode 100644
index 0000000..f3e2ef6
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java
@@ -0,0 +1,532 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * Wrapper message for `string`.
+ * The JSON representation for `StringValue` is JSON string.
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.StringValue}
+ */
+public  final class StringValue extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.StringValue)
+    StringValueOrBuilder {
+  // Use StringValue.newBuilder() to construct.
+  private StringValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private StringValue() {
+    value_ = "";
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private StringValue(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 10: {
+            java.lang.String s = input.readStringRequireUtf8();
+
+            value_ = s;
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_StringValue_descriptor;
+  }
+
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_StringValue_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.Builder.class);
+  }
+
+  public static final int VALUE_FIELD_NUMBER = 1;
+  private volatile java.lang.Object value_;
+  /**
+   * <pre>
+   * The string value.
+   * </pre>
+   *
+   * <code>string value = 1;</code>
+   */
+  public java.lang.String getValue() {
+    java.lang.Object ref = value_;
+    if (ref instanceof java.lang.String) {
+      return (java.lang.String) ref;
+    } else {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
+          (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      java.lang.String s = bs.toStringUtf8();
+      value_ = s;
+      return s;
+    }
+  }
+  /**
+   * <pre>
+   * The string value.
+   * </pre>
+   *
+   * <code>string value = 1;</code>
+   */
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+      getValueBytes() {
+    java.lang.Object ref = value_;
+    if (ref instanceof java.lang.String) {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
+          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+              (java.lang.String) ref);
+      value_ = b;
+      return b;
+    } else {
+      return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+    }
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    if (!getValueBytes().isEmpty()) {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, value_);
+    }
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    if (!getValueBytes().isEmpty()) {
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, value_);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue) obj;
+
+    boolean result = true;
+    result = result && getValue()
+        .equals(other.getValue());
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptor().hashCode();
+    hash = (37 * hash) + VALUE_FIELD_NUMBER;
+    hash = (53 * hash) + getValue().hashCode();
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * Wrapper message for `string`.
+   * The JSON representation for `StringValue` is JSON string.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.StringValue}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.StringValue)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValueOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_StringValue_descriptor;
+    }
+
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_StringValue_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      value_ = "";
+
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_StringValue_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue(this);
+      result.value_ = value_;
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue.getDefaultInstance()) return this;
+      if (!other.getValue().isEmpty()) {
+        value_ = other.value_;
+        onChanged();
+      }
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+
+    private java.lang.Object value_ = "";
+    /**
+     * <pre>
+     * The string value.
+     * </pre>
+     *
+     * <code>string value = 1;</code>
+     */
+    public java.lang.String getValue() {
+      java.lang.Object ref = value_;
+      if (!(ref instanceof java.lang.String)) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        value_ = s;
+        return s;
+      } else {
+        return (java.lang.String) ref;
+      }
+    }
+    /**
+     * <pre>
+     * The string value.
+     * </pre>
+     *
+     * <code>string value = 1;</code>
+     */
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+        getValueBytes() {
+      java.lang.Object ref = value_;
+      if (ref instanceof String) {
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        value_ = b;
+        return b;
+      } else {
+        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+      }
+    }
+    /**
+     * <pre>
+     * The string value.
+     * </pre>
+     *
+     * <code>string value = 1;</code>
+     */
+    public Builder setValue(
+        java.lang.String value) {
+      if (value == null) {
+    throw new NullPointerException();
+  }
+  
+      value_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * The string value.
+     * </pre>
+     *
+     * <code>string value = 1;</code>
+     */
+    public Builder clearValue() {
+      
+      value_ = getDefaultInstance().getValue();
+      onChanged();
+      return this;
+    }
+    /**
+     * <pre>
+     * The string value.
+     * </pre>
+     *
+     * <code>string value = 1;</code>
+     */
+    public Builder setValueBytes(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+      if (value == null) {
+    throw new NullPointerException();
+  }
+  checkByteStringIsUtf8(value);
+      
+      value_ = value;
+      onChanged();
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.StringValue)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.StringValue)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StringValue>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<StringValue>() {
+    public StringValue parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new StringValue(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StringValue> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<StringValue> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
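
[Editor's note: the file above is the stock protoc output for the `string` wrapper type, relocated under the org.apache.hadoop.hbase.shaded prefix. A minimal usage sketch follows, assuming this shaded runtime is on the classpath; the class and variable names in the sketch are illustrative only and are not part of the commit.]

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue;

    public final class StringValueExample {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a StringValue through the generated Builder API shown above.
        StringValue original = StringValue.newBuilder()
            .setValue("hello-hbase")
            .build();

        // Serialize and parse back; parseFrom(byte[]) is generated in the file above.
        byte[] wire = original.toByteArray();
        StringValue roundTripped = StringValue.parseFrom(wire);

        // value = 1 is the only field; getValue() returns the UTF-8 decoded string.
        System.out.println(roundTripped.getValue());
      }
    }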

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValueOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValueOrBuilder.java
new file mode 100644
index 0000000..7ac0ee8
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValueOrBuilder.java
@@ -0,0 +1,27 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/wrappers.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface StringValueOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.StringValue)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * The string value.
+   * </pre>
+   *
+   * <code>string value = 1;</code>
+   */
+  java.lang.String getValue();
+  /**
+   * <pre>
+   * The string value.
+   * </pre>
+   *
+   * <code>string value = 1;</code>
+   */
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+      getValueBytes();
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java
new file mode 100644
index 0000000..2607900
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java
@@ -0,0 +1,705 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/struct.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * `Struct` represents a structured data value, consisting of fields
+ * which map to dynamically typed values. In some languages, `Struct`
+ * might be supported by a native representation. For example, in
+ * scripting languages like JS a struct is represented as an
+ * object. The details of that representation are described together
+ * with the proto support for the language.
+ * The JSON representation for `Struct` is JSON object.
+ * </pre>
+ *
+ * Protobuf type {@code google.protobuf.Struct}
+ */
+public  final class Struct extends
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+    // @@protoc_insertion_point(message_implements:google.protobuf.Struct)
+    StructOrBuilder {
+  // Use Struct.newBuilder() to construct.
+  private Struct(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+    super(builder);
+  }
+  private Struct() {
+  }
+
+  @java.lang.Override
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+  getUnknownFields() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+  }
+  private Struct(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    this();
+    int mutable_bitField0_ = 0;
+    try {
+      boolean done = false;
+      while (!done) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            done = true;
+            break;
+          default: {
+            if (!input.skipField(tag)) {
+              done = true;
+            }
+            break;
+          }
+          case 10: {
+            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+              fields_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField.newMapField(
+                  FieldsDefaultEntryHolder.defaultEntry);
+              mutable_bitField0_ |= 0x00000001;
+            }
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.MapEntry<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+            fields__ = input.readMessage(
+                FieldsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
+            fields_.getMutableMap().put(
+                fields__.getKey(), fields__.getValue());
+            break;
+          }
+        }
+      }
+    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+      throw e.setUnfinishedMessage(this);
+    } catch (java.io.IOException e) {
+      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+          e).setUnfinishedMessage(this);
+    } finally {
+      makeExtensionsImmutable();
+    }
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_descriptor;
+  }
+
+  @SuppressWarnings({"rawtypes"})
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField internalGetMapField(
+      int number) {
+    switch (number) {
+      case 1:
+        return internalGetFields();
+      default:
+        throw new RuntimeException(
+            "Invalid map field number: " + number);
+    }
+  }
+  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internalGetFieldAccessorTable() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_fieldAccessorTable
+        .ensureFieldAccessorsInitialized(
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.Builder.class);
+  }
+
+  public static final int FIELDS_FIELD_NUMBER = 1;
+  private static final class FieldsDefaultEntryHolder {
+    static final org.apache.hadoop.hbase.shaded.com.google.protobuf.MapEntry<
+        java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> defaultEntry =
+            org.apache.hadoop.hbase.shaded.com.google.protobuf.MapEntry
+            .<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>newDefaultInstance(
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_FieldsEntry_descriptor, 
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FieldType.STRING,
+                "",
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FieldType.MESSAGE,
+                org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.getDefaultInstance());
+  }
+  private org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField<
+      java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> fields_;
+  private org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+  internalGetFields() {
+    if (fields_ == null) {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField.emptyMapField(
+          FieldsDefaultEntryHolder.defaultEntry);
+    }
+    return fields_;
+  }
+
+  public int getFieldsCount() {
+    return internalGetFields().getMap().size();
+  }
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  public boolean containsFields(
+      java.lang.String key) {
+    if (key == null) { throw new java.lang.NullPointerException(); }
+    return internalGetFields().getMap().containsKey(key);
+  }
+  /**
+   * Use {@link #getFieldsMap()} instead.
+   */
+  @java.lang.Deprecated
+  public java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getFields() {
+    return getFieldsMap();
+  }
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  public java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getFieldsMap() {
+    return internalGetFields().getMap();
+  }
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrDefault(
+      java.lang.String key,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Value defaultValue) {
+    if (key == null) { throw new java.lang.NullPointerException(); }
+    java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> map =
+        internalGetFields().getMap();
+    return map.containsKey(key) ? map.get(key) : defaultValue;
+  }
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrThrow(
+      java.lang.String key) {
+    if (key == null) { throw new java.lang.NullPointerException(); }
+    java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> map =
+        internalGetFields().getMap();
+    if (!map.containsKey(key)) {
+      throw new java.lang.IllegalArgumentException();
+    }
+    return map.get(key);
+  }
+
+  private byte memoizedIsInitialized = -1;
+  public final boolean isInitialized() {
+    byte isInitialized = memoizedIsInitialized;
+    if (isInitialized == 1) return true;
+    if (isInitialized == 0) return false;
+
+    memoizedIsInitialized = 1;
+    return true;
+  }
+
+  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+                      throws java.io.IOException {
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+      .serializeStringMapTo(
+        output,
+        internalGetFields(),
+        FieldsDefaultEntryHolder.defaultEntry,
+        1);
+  }
+
+  public int getSerializedSize() {
+    int size = memoizedSize;
+    if (size != -1) return size;
+
+    size = 0;
+    for (java.util.Map.Entry<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> entry
+         : internalGetFields().getMap().entrySet()) {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.MapEntry<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+      fields__ = FieldsDefaultEntryHolder.defaultEntry.newBuilderForType()
+          .setKey(entry.getKey())
+          .setValue(entry.getValue())
+          .build();
+      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, fields__);
+    }
+    memoizedSize = size;
+    return size;
+  }
+
+  private static final long serialVersionUID = 0L;
+  @java.lang.Override
+  public boolean equals(final java.lang.Object obj) {
+    if (obj == this) {
+     return true;
+    }
+    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct)) {
+      return super.equals(obj);
+    }
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct) obj;
+
+    boolean result = true;
+    result = result && internalGetFields().equals(
+        other.internalGetFields());
+    return result;
+  }
+
+  @java.lang.Override
+  public int hashCode() {
+    if (memoizedHashCode != 0) {
+      return memoizedHashCode;
+    }
+    int hash = 41;
+    hash = (19 * hash) + getDescriptor().hashCode();
+    if (!internalGetFields().getMap().isEmpty()) {
+      hash = (37 * hash) + FIELDS_FIELD_NUMBER;
+      hash = (53 * hash) + internalGetFields().hashCode();
+    }
+    hash = (29 * hash) + unknownFields.hashCode();
+    memoizedHashCode = hash;
+    return hash;
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(byte[] data)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      byte[] data,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+    return PARSER.parseFrom(data, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseDelimitedFrom(java.io.InputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseDelimitedFrom(
+      java.io.InputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input);
+  }
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+      throws java.io.IOException {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+        .parseWithIOException(PARSER, input, extensionRegistry);
+  }
+
+  public Builder newBuilderForType() { return newBuilder(); }
+  public static Builder newBuilder() {
+    return DEFAULT_INSTANCE.toBuilder();
+  }
+  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct prototype) {
+    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+  }
+  public Builder toBuilder() {
+    return this == DEFAULT_INSTANCE
+        ? new Builder() : new Builder().mergeFrom(this);
+  }
+
+  @java.lang.Override
+  protected Builder newBuilderForType(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+    Builder builder = new Builder(parent);
+    return builder;
+  }
+  /**
+   * <pre>
+   * `Struct` represents a structured data value, consisting of fields
+   * which map to dynamically typed values. In some languages, `Struct`
+   * might be supported by a native representation. For example, in
+   * scripting languages like JS a struct is represented as an
+   * object. The details of that representation are described together
+   * with the proto support for the language.
+   * The JSON representation for `Struct` is JSON object.
+   * </pre>
+   *
+   * Protobuf type {@code google.protobuf.Struct}
+   */
+  public static final class Builder extends
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+      // @@protoc_insertion_point(builder_implements:google.protobuf.Struct)
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.StructOrBuilder {
+    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_descriptor;
+    }
+
+    @SuppressWarnings({"rawtypes"})
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField internalGetMapField(
+        int number) {
+      switch (number) {
+        case 1:
+          return internalGetFields();
+        default:
+          throw new RuntimeException(
+              "Invalid map field number: " + number);
+      }
+    }
+    @SuppressWarnings({"rawtypes"})
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField internalGetMutableMapField(
+        int number) {
+      switch (number) {
+        case 1:
+          return internalGetMutableFields();
+        default:
+          throw new RuntimeException(
+              "Invalid map field number: " + number);
+      }
+    }
+    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.Builder.class);
+    }
+
+    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.newBuilder()
+    private Builder() {
+      maybeForceBuilderInitialization();
+    }
+
+    private Builder(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      super(parent);
+      maybeForceBuilderInitialization();
+    }
+    private void maybeForceBuilderInitialization() {
+      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+              .alwaysUseFieldBuilders) {
+      }
+    }
+    public Builder clear() {
+      super.clear();
+      internalGetMutableFields().clear();
+      return this;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+        getDescriptorForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_Struct_descriptor;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct getDefaultInstanceForType() {
+      return org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.getDefaultInstance();
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct build() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct result = buildPartial();
+      if (!result.isInitialized()) {
+        throw newUninitializedMessageException(result);
+      }
+      return result;
+    }
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct buildPartial() {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct(this);
+      int from_bitField0_ = bitField0_;
+      result.fields_ = internalGetFields();
+      result.fields_.makeImmutable();
+      onBuilt();
+      return result;
+    }
+
+    public Builder clone() {
+      return (Builder) super.clone();
+    }
+    public Builder setField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.setField(field, value);
+    }
+    public Builder clearField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+      return (Builder) super.clearField(field);
+    }
+    public Builder clearOneof(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+      return (Builder) super.clearOneof(oneof);
+    }
+    public Builder setRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        int index, Object value) {
+      return (Builder) super.setRepeatedField(field, index, value);
+    }
+    public Builder addRepeatedField(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+        Object value) {
+      return (Builder) super.addRepeatedField(field, value);
+    }
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct) {
+        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct)other);
+      } else {
+        super.mergeFrom(other);
+        return this;
+      }
+    }
+
+    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct other) {
+      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct.getDefaultInstance()) return this;
+      internalGetMutableFields().mergeFrom(
+          other.internalGetFields());
+      onChanged();
+      return this;
+    }
+
+    public final boolean isInitialized() {
+      return true;
+    }
+
+    public Builder mergeFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parsedMessage = null;
+      try {
+        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct) e.getUnfinishedMessage();
+        throw e.unwrapIOException();
+      } finally {
+        if (parsedMessage != null) {
+          mergeFrom(parsedMessage);
+        }
+      }
+      return this;
+    }
+    private int bitField0_;
+
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField<
+        java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> fields_;
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+    internalGetFields() {
+      if (fields_ == null) {
+        return org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField.emptyMapField(
+            FieldsDefaultEntryHolder.defaultEntry);
+      }
+      return fields_;
+    }
+    private org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+    internalGetMutableFields() {
+      onChanged();;
+      if (fields_ == null) {
+        fields_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.MapField.newMapField(
+            FieldsDefaultEntryHolder.defaultEntry);
+      }
+      if (!fields_.isMutable()) {
+        fields_ = fields_.copy();
+      }
+      return fields_;
+    }
+
+    public int getFieldsCount() {
+      return internalGetFields().getMap().size();
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public boolean containsFields(
+        java.lang.String key) {
+      if (key == null) { throw new java.lang.NullPointerException(); }
+      return internalGetFields().getMap().containsKey(key);
+    }
+    /**
+     * Use {@link #getFieldsMap()} instead.
+     */
+    @java.lang.Deprecated
+    public java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getFields() {
+      return getFieldsMap();
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getFieldsMap() {
+      return internalGetFields().getMap();
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrDefault(
+        java.lang.String key,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Value defaultValue) {
+      if (key == null) { throw new java.lang.NullPointerException(); }
+      java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> map =
+          internalGetFields().getMap();
+      return map.containsKey(key) ? map.get(key) : defaultValue;
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrThrow(
+        java.lang.String key) {
+      if (key == null) { throw new java.lang.NullPointerException(); }
+      java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> map =
+          internalGetFields().getMap();
+      if (!map.containsKey(key)) {
+        throw new java.lang.IllegalArgumentException();
+      }
+      return map.get(key);
+    }
+
+    public Builder clearFields() {
+      internalGetMutableFields().getMutableMap()
+          .clear();
+      return this;
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public Builder removeFields(
+        java.lang.String key) {
+      if (key == null) { throw new java.lang.NullPointerException(); }
+      internalGetMutableFields().getMutableMap()
+          .remove(key);
+      return this;
+    }
+    /**
+     * Use alternate mutation accessors instead.
+     */
+    @java.lang.Deprecated
+    public java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+    getMutableFields() {
+      return internalGetMutableFields().getMutableMap();
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+    public Builder putFields(
+        java.lang.String key,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.Value value) {
+      if (key == null) { throw new java.lang.NullPointerException(); }
+      if (value == null) { throw new java.lang.NullPointerException(); }
+      internalGetMutableFields().getMutableMap()
+          .put(key, value);
+      return this;
+    }
+    /**
+     * <pre>
+     * Unordered map of dynamically typed values.
+     * </pre>
+     *
+     * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+     */
+
+    public Builder putAllFields(
+        java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> values) {
+      internalGetMutableFields().getMutableMap()
+          .putAll(values);
+      return this;
+    }
+    public final Builder setUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+    public final Builder mergeUnknownFields(
+        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+      return this;
+    }
+
+
+    // @@protoc_insertion_point(builder_scope:google.protobuf.Struct)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.protobuf.Struct)
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct DEFAULT_INSTANCE;
+  static {
+    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct();
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Struct>
+      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Struct>() {
+    public Struct parsePartialFrom(
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+        return new Struct(input, extensionRegistry);
+    }
+  };
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Struct> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Struct> getParserForType() {
+    return PARSER;
+  }
+
+  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+
+}
+
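
[Editor's note: as above, a minimal sketch of how the generated Struct map API might be exercised. It assumes the companion Value class (referenced throughout the file above but added elsewhere in this commit) provides the standard struct.proto builder methods; names in the sketch are illustrative only.]

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Value;

    public final class StructExample {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Populate the map<string, Value> field through the generated putFields().
        Struct struct = Struct.newBuilder()
            .putFields("table", Value.newBuilder().setStringValue("TestTable").build())
            .putFields("regions", Value.newBuilder().setNumberValue(8).build())
            .build();

        // Round-trip through the wire format using the generated parser.
        Struct parsed = Struct.parseFrom(struct.toByteArray());

        // getFieldsOrThrow() throws IllegalArgumentException for a missing key.
        System.out.println(parsed.getFieldsOrThrow("table").getStringValue());
        System.out.println(parsed.getFieldsCount());
      }
    }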

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructOrBuilder.java
new file mode 100644
index 0000000..229f540
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructOrBuilder.java
@@ -0,0 +1,63 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/struct.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public interface StructOrBuilder extends
+    // @@protoc_insertion_point(interface_extends:google.protobuf.Struct)
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+  int getFieldsCount();
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+  boolean containsFields(
+      java.lang.String key);
+  /**
+   * Use {@link #getFieldsMap()} instead.
+   */
+  @java.lang.Deprecated
+  java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+  getFields();
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+  java.util.Map<java.lang.String, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
+  getFieldsMap();
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrDefault(
+      java.lang.String key,
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Value defaultValue);
+  /**
+   * <pre>
+   * Unordered map of dynamically typed values.
+   * </pre>
+   *
+   * <code>map&lt;string, .google.protobuf.Value&gt; fields = 1;</code>
+   */
+
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getFieldsOrThrow(
+      java.lang.String key);
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructProto.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructProto.java
new file mode 100644
index 0000000..4d5cd4b
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructProto.java
@@ -0,0 +1,103 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/struct.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+public final class StructProto {
+  private StructProto() {}
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_google_protobuf_Struct_descriptor;
+  static final 
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_protobuf_Struct_fieldAccessorTable;
+  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_google_protobuf_Struct_FieldsEntry_descriptor;
+  static final 
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_protobuf_Struct_FieldsEntry_fieldAccessorTable;
+  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_google_protobuf_Value_descriptor;
+  static final 
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_protobuf_Value_fieldAccessorTable;
+  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+    internal_static_google_protobuf_ListValue_descriptor;
+  static final 
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_google_protobuf_ListValue_fieldAccessorTable;
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static  org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\034google/protobuf/struct.proto\022\017google.p" +
+      "rotobuf\"\204\001\n\006Struct\0223\n\006fields\030\001 \003(\0132#.goo" +
+      "gle.protobuf.Struct.FieldsEntry\032E\n\013Field" +
+      "sEntry\022\013\n\003key\030\001 \001(\t\022%\n\005value\030\002 \001(\0132\026.goo" +
+      "gle.protobuf.Value:\0028\001\"\352\001\n\005Value\0220\n\nnull" +
+      "_value\030\001 \001(\0162\032.google.protobuf.NullValue" +
+      "H\000\022\026\n\014number_value\030\002 \001(\001H\000\022\026\n\014string_val" +
+      "ue\030\003 \001(\tH\000\022\024\n\nbool_value\030\004 \001(\010H\000\022/\n\014stru" +
+      "ct_value\030\005 \001(\0132\027.google.protobuf.StructH" +
+      "\000\0220\n\nlist_value\030\006 \001(\0132\032.google.protobuf.",
+      "ListValueH\000B\006\n\004kind\"3\n\tListValue\022&\n\006valu" +
+      "es\030\001 \003(\0132\026.google.protobuf.Value*\033\n\tNull" +
+      "Value\022\016\n\nNULL_VALUE\020\000B\201\001\n\023com.google.pro" +
+      "tobufB\013StructProtoP\001Z1github.com/golang/" +
+      "protobuf/ptypes/struct;structpb\370\001\001\242\002\003GPB" +
+      "\252\002\036Google.Protobuf.WellKnownTypesb\006proto" +
+      "3"
+    };
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

+          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
+            descriptor = root;
+            return null;
+          }
+        };
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+    internal_static_google_protobuf_Struct_descriptor =
+      getDescriptor().getMessageTypes().get(0);
+    internal_static_google_protobuf_Struct_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_google_protobuf_Struct_descriptor,
+        new java.lang.String[] { "Fields", });
+    internal_static_google_protobuf_Struct_FieldsEntry_descriptor =
+      internal_static_google_protobuf_Struct_descriptor.getNestedTypes().get(0);
+    internal_static_google_protobuf_Struct_FieldsEntry_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_google_protobuf_Struct_FieldsEntry_descriptor,
+        new java.lang.String[] { "Key", "Value", });
+    internal_static_google_protobuf_Value_descriptor =
+      getDescriptor().getMessageTypes().get(1);
+    internal_static_google_protobuf_Value_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_google_protobuf_Value_descriptor,
+        new java.lang.String[] { "NullValue", "NumberValue", "StringValue", "BoolValue", "StructValue", "ListValue", "Kind", });
+    internal_static_google_protobuf_ListValue_descriptor =
+      getDescriptor().getMessageTypes().get(2);
+    internal_static_google_protobuf_ListValue_fieldAccessorTable = new
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_google_protobuf_ListValue_descriptor,
+        new java.lang.String[] { "Values", });
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
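
StructProto above only holds the embedded file descriptor and the field-accessor tables. A quick way to confirm that the descriptor string decodes into the expected types (a sketch using only the public Descriptors API, not anything HBase-specific) is to walk the file descriptor:

  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
  import org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto;

  public class StructProtoInspect {
    public static void main(String[] args) {
      Descriptors.FileDescriptor file = StructProto.getDescriptor();
      // Expect Struct, Value and ListValue, matching the accessor tables above.
      for (Descriptors.Descriptor message : file.getMessageTypes()) {
        System.out.println(message.getFullName());
      }
      // The file also declares the NullValue enum.
      for (Descriptors.EnumDescriptor e : file.getEnumTypes()) {
        System.out.println(e.getFullName());
      }
    }
  }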

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2b5cd6d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Syntax.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Syntax.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Syntax.java
new file mode 100644
index 0000000..f03d70b
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Syntax.java
@@ -0,0 +1,123 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: google/protobuf/type.proto
+
+package org.apache.hadoop.hbase.shaded.com.google.protobuf;
+
+/**
+ * <pre>
+ * The syntax in which a protocol buffer element is defined.
+ * </pre>
+ *
+ * Protobuf enum {@code google.protobuf.Syntax}
+ */
+public enum Syntax
+    implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
+  /**
+   * <pre>
+   * Syntax `proto2`.
+   * </pre>
+   *
+   * <code>SYNTAX_PROTO2 = 0;</code>
+   */
+  SYNTAX_PROTO2(0),
+  /**
+   * <pre>
+   * Syntax `proto3`.
+   * </pre>
+   *
+   * <code>SYNTAX_PROTO3 = 1;</code>
+   */
+  SYNTAX_PROTO3(1),
+  UNRECOGNIZED(-1),
+  ;
+
+  /**
+   * <pre>
+   * Syntax `proto2`.
+   * </pre>
+   *
+   * <code>SYNTAX_PROTO2 = 0;</code>
+   */
+  public static final int SYNTAX_PROTO2_VALUE = 0;
+  /**
+   * <pre>
+   * Syntax `proto3`.
+   * </pre>
+   *
+   * <code>SYNTAX_PROTO3 = 1;</code>
+   */
+  public static final int SYNTAX_PROTO3_VALUE = 1;
+
+
+  public final int getNumber() {
+    if (this == UNRECOGNIZED) {
+      throw new java.lang.IllegalArgumentException(
+          "Can't get the number of an unknown enum value.");
+    }
+    return value;
+  }
+
+  /**
+   * @deprecated Use {@link #forNumber(int)} instead.
+   */
+  @java.lang.Deprecated
+  public static Syntax valueOf(int value) {
+    return forNumber(value);
+  }
+
+  public static Syntax forNumber(int value) {
+    switch (value) {
+      case 0: return SYNTAX_PROTO2;
+      case 1: return SYNTAX_PROTO3;
+      default: return null;
+    }
+  }
+
+  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Syntax>
+      internalGetValueMap() {
+    return internalValueMap;
+  }
+  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+      Syntax> internalValueMap =
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<Syntax>() {
+          public Syntax findValueByNumber(int number) {
+            return Syntax.forNumber(number);
+          }
+        };
+
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor
+      getValueDescriptor() {
+    return getDescriptor().getValues().get(ordinal());
+  }
+  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+      getDescriptorForType() {
+    return getDescriptor();
+  }
+  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor
+      getDescriptor() {
+    return org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.getDescriptor().getEnumTypes().get(0);
+  }
+
+  private static final Syntax[] VALUES = values();
+
+  public static Syntax valueOf(
+      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+    if (desc.getType() != getDescriptor()) {
+      throw new java.lang.IllegalArgumentException(
+        "EnumValueDescriptor is not for this type.");
+    }
+    if (desc.getIndex() == -1) {
+      return UNRECOGNIZED;
+    }
+    return VALUES[desc.getIndex()];
+  }
+
+  private final int value;
+
+  private Syntax(int value) {
+    this.value = value;
+  }
+
+  // @@protoc_insertion_point(enum_scope:google.protobuf.Syntax)
+}
+
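
As the code above shows, forNumber(int) returns null for wire values the enum does not define, while the sentinel UNRECOGNIZED has no wire number and getNumber() refuses to return one. A small sketch of that behavior against the shaded Syntax enum:

  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax;

  public class SyntaxExample {
    public static void main(String[] args) {
      // forNumber returns null for numbers outside the declared values.
      Syntax known = Syntax.forNumber(1);     // SYNTAX_PROTO3
      Syntax unknown = Syntax.forNumber(42);  // null
      System.out.println(known + " / " + unknown);

      // UNRECOGNIZED carries no wire number; asking for one throws.
      try {
        Syntax.UNRECOGNIZED.getNumber();
      } catch (IllegalArgumentException expected) {
        System.out.println("UNRECOGNIZED has no number: " + expected.getMessage());
      }
    }
  }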