Posted to commits@hbase.apache.org by st...@apache.org on 2017/07/06 20:13:22 UTC

[07/53] [abbrv] [partial] hbase git commit: HBASE-17056 Remove checked in PB generated files. Selective add of dependency on hbase-thirdparty jars. Update to READMEs on how protobuf is done (and update to refguide). Removed all checked in generated protobu
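For context, the file deleted below is protoc-generated Java for the plugin.proto messages, relocated under the org.apache.hadoop.hbase.shaded prefix. After this change the sources are produced at build time rather than checked in, but the generated API is unchanged. A minimal sketch of that API, using only methods visible in the deleted file (the builder setters, build(), and the static parseFrom(byte[]) parser); toByteArray() comes from the standard protobuf Message interface and is assumed here:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos;

    public final class VersionRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a Version message with the generated builder API.
        PluginProtos.Version v = PluginProtos.Version.newBuilder()
            .setMajor(3)
            .setMinor(1)
            .setPatch(0)
            .setSuffix("rc2")   // should be empty for mainline stable releases
            .build();
        // Serialize and re-parse; parseFrom(byte[]) is defined in the deleted file.
        PluginProtos.Version parsed = PluginProtos.Version.parseFrom(v.toByteArray());
        System.out.println(parsed.getMajor() + "." + parsed.getMinor()
            + "." + parsed.getPatch() + "-" + parsed.getSuffix());
      }
    }
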

http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java
deleted file mode 100644
index 71975c2..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java
+++ /dev/null
@@ -1,5424 +0,0 @@
-// Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: google/protobuf/compiler/plugin.proto
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler;
-
-public final class PluginProtos {
-  private PluginProtos() {}
-  public static void registerAllExtensions(
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
-  }
-
-  public static void registerAllExtensions(
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
-    registerAllExtensions(
-        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
-  }
-  public interface VersionOrBuilder extends
-      // @@protoc_insertion_point(interface_extends:google.protobuf.compiler.Version)
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <code>optional int32 major = 1;</code>
-     */
-    boolean hasMajor();
-    /**
-     * <code>optional int32 major = 1;</code>
-     */
-    int getMajor();
-
-    /**
-     * <code>optional int32 minor = 2;</code>
-     */
-    boolean hasMinor();
-    /**
-     * <code>optional int32 minor = 2;</code>
-     */
-    int getMinor();
-
-    /**
-     * <code>optional int32 patch = 3;</code>
-     */
-    boolean hasPatch();
-    /**
-     * <code>optional int32 patch = 3;</code>
-     */
-    int getPatch();
-
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    boolean hasSuffix();
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    java.lang.String getSuffix();
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getSuffixBytes();
-  }
-  /**
-   * <pre>
-   * The version number of protocol compiler.
-   * </pre>
-   *
-   * Protobuf type {@code google.protobuf.compiler.Version}
-   */
-  public  static final class Version extends
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
-      // @@protoc_insertion_point(message_implements:google.protobuf.compiler.Version)
-      VersionOrBuilder {
-    // Use Version.newBuilder() to construct.
-    private Version(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
-      super(builder);
-    }
-    private Version() {
-      major_ = 0;
-      minor_ = 0;
-      patch_ = 0;
-      suffix_ = "";
-    }
-
-    @java.lang.Override
-    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
-      return this.unknownFields;
-    }
-    private Version(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      this();
-      int mutable_bitField0_ = 0;
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              major_ = input.readInt32();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              minor_ = input.readInt32();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              patch_ = input.readInt32();
-              break;
-            }
-            case 34: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              bitField0_ |= 0x00000008;
-              suffix_ = bs;
-              break;
-            }
-          }
-        }
-      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
-            e).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_Version_descriptor;
-    }
-
-    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_Version_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.Builder.class);
-    }
-
-    private int bitField0_;
-    public static final int MAJOR_FIELD_NUMBER = 1;
-    private int major_;
-    /**
-     * <code>optional int32 major = 1;</code>
-     */
-    public boolean hasMajor() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>optional int32 major = 1;</code>
-     */
-    public int getMajor() {
-      return major_;
-    }
-
-    public static final int MINOR_FIELD_NUMBER = 2;
-    private int minor_;
-    /**
-     * <code>optional int32 minor = 2;</code>
-     */
-    public boolean hasMinor() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <code>optional int32 minor = 2;</code>
-     */
-    public int getMinor() {
-      return minor_;
-    }
-
-    public static final int PATCH_FIELD_NUMBER = 3;
-    private int patch_;
-    /**
-     * <code>optional int32 patch = 3;</code>
-     */
-    public boolean hasPatch() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    /**
-     * <code>optional int32 patch = 3;</code>
-     */
-    public int getPatch() {
-      return patch_;
-    }
-
-    public static final int SUFFIX_FIELD_NUMBER = 4;
-    private volatile java.lang.Object suffix_;
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    public boolean hasSuffix() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    public java.lang.String getSuffix() {
-      java.lang.Object ref = suffix_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
-            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          suffix_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <pre>
-     * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-     * be empty for mainline stable releases.
-     * </pre>
-     *
-     * <code>optional string suffix = 4;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getSuffixBytes() {
-      java.lang.Object ref = suffix_;
-      if (ref instanceof java.lang.String) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        suffix_ = b;
-        return b;
-      } else {
-        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized == 1) return true;
-      if (isInitialized == 0) return false;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeInt32(1, major_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeInt32(2, minor_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt32(3, patch_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, suffix_);
-      }
-      unknownFields.writeTo(output);
-    }
-
-    public int getSerializedSize() {
-      int size = memoizedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeInt32Size(1, major_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeInt32Size(2, minor_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeInt32Size(3, patch_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, suffix_);
-      }
-      size += unknownFields.getSerializedSize();
-      memoizedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version) obj;
-
-      boolean result = true;
-      result = result && (hasMajor() == other.hasMajor());
-      if (hasMajor()) {
-        result = result && (getMajor()
-            == other.getMajor());
-      }
-      result = result && (hasMinor() == other.hasMinor());
-      if (hasMinor()) {
-        result = result && (getMinor()
-            == other.getMinor());
-      }
-      result = result && (hasPatch() == other.hasPatch());
-      if (hasPatch()) {
-        result = result && (getPatch()
-            == other.getPatch());
-      }
-      result = result && (hasSuffix() == other.hasSuffix());
-      if (hasSuffix()) {
-        result = result && getSuffix()
-            .equals(other.getSuffix());
-      }
-      result = result && unknownFields.equals(other.unknownFields);
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptor().hashCode();
-      if (hasMajor()) {
-        hash = (37 * hash) + MAJOR_FIELD_NUMBER;
-        hash = (53 * hash) + getMajor();
-      }
-      if (hasMinor()) {
-        hash = (37 * hash) + MINOR_FIELD_NUMBER;
-        hash = (53 * hash) + getMinor();
-      }
-      if (hasPatch()) {
-        hash = (37 * hash) + PATCH_FIELD_NUMBER;
-        hash = (53 * hash) + getPatch();
-      }
-      if (hasSuffix()) {
-        hash = (37 * hash) + SUFFIX_FIELD_NUMBER;
-        hash = (53 * hash) + getSuffix().hashCode();
-      }
-      hash = (29 * hash) + unknownFields.hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(byte[] data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        byte[] data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseDelimitedFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder() {
-      return DEFAULT_INSTANCE.toBuilder();
-    }
-    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version prototype) {
-      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() {
-      return this == DEFAULT_INSTANCE
-          ? new Builder() : new Builder().mergeFrom(this);
-    }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * Protobuf type {@code google.protobuf.compiler.Version}
-     */
-    public static final class Builder extends
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
-        // @@protoc_insertion_point(builder_implements:google.protobuf.compiler.Version)
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.VersionOrBuilder {
-      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_Version_descriptor;
-      }
-
-      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_Version_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-                .alwaysUseFieldBuilders) {
-        }
-      }
-      public Builder clear() {
-        super.clear();
-        major_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        minor_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        patch_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        suffix_ = "";
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_Version_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version build() {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version buildPartial() {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.major_ = major_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.minor_ = minor_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.patch_ = patch_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.suffix_ = suffix_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder clone() {
-        return (Builder) super.clone();
-      }
-      public Builder setField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.setField(field, value);
-      }
-      public Builder clearField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
-        return (Builder) super.clearField(field);
-      }
-      public Builder clearOneof(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
-        return (Builder) super.clearOneof(oneof);
-      }
-      public Builder setRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          int index, Object value) {
-        return (Builder) super.setRepeatedField(field, index, value);
-      }
-      public Builder addRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.addRepeatedField(field, value);
-      }
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version) {
-          return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version other) {
-        if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.getDefaultInstance()) return this;
-        if (other.hasMajor()) {
-          setMajor(other.getMajor());
-        }
-        if (other.hasMinor()) {
-          setMinor(other.getMinor());
-        }
-        if (other.hasPatch()) {
-          setPatch(other.getPatch());
-        }
-        if (other.hasSuffix()) {
-          bitField0_ |= 0x00000008;
-          suffix_ = other.suffix_;
-          onChanged();
-        }
-        this.mergeUnknownFields(other.unknownFields);
-        onChanged();
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version) e.getUnfinishedMessage();
-          throw e.unwrapIOException();
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      private int major_ ;
-      /**
-       * <code>optional int32 major = 1;</code>
-       */
-      public boolean hasMajor() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>optional int32 major = 1;</code>
-       */
-      public int getMajor() {
-        return major_;
-      }
-      /**
-       * <code>optional int32 major = 1;</code>
-       */
-      public Builder setMajor(int value) {
-        bitField0_ |= 0x00000001;
-        major_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 major = 1;</code>
-       */
-      public Builder clearMajor() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        major_ = 0;
-        onChanged();
-        return this;
-      }
-
-      private int minor_ ;
-      /**
-       * <code>optional int32 minor = 2;</code>
-       */
-      public boolean hasMinor() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * <code>optional int32 minor = 2;</code>
-       */
-      public int getMinor() {
-        return minor_;
-      }
-      /**
-       * <code>optional int32 minor = 2;</code>
-       */
-      public Builder setMinor(int value) {
-        bitField0_ |= 0x00000002;
-        minor_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 minor = 2;</code>
-       */
-      public Builder clearMinor() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        minor_ = 0;
-        onChanged();
-        return this;
-      }
-
-      private int patch_ ;
-      /**
-       * <code>optional int32 patch = 3;</code>
-       */
-      public boolean hasPatch() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      /**
-       * <code>optional int32 patch = 3;</code>
-       */
-      public int getPatch() {
-        return patch_;
-      }
-      /**
-       * <code>optional int32 patch = 3;</code>
-       */
-      public Builder setPatch(int value) {
-        bitField0_ |= 0x00000004;
-        patch_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 patch = 3;</code>
-       */
-      public Builder clearPatch() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        patch_ = 0;
-        onChanged();
-        return this;
-      }
-
-      private java.lang.Object suffix_ = "";
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public boolean hasSuffix() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public java.lang.String getSuffix() {
-        java.lang.Object ref = suffix_;
-        if (!(ref instanceof java.lang.String)) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
-              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
-            suffix_ = s;
-          }
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getSuffixBytes() {
-        java.lang.Object ref = suffix_;
-        if (ref instanceof String) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          suffix_ = b;
-          return b;
-        } else {
-          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public Builder setSuffix(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000008;
-        suffix_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public Builder clearSuffix() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        suffix_ = getDefaultInstance().getSuffix();
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
-       * be empty for mainline stable releases.
-       * </pre>
-       *
-       * <code>optional string suffix = 4;</code>
-       */
-      public Builder setSuffixBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000008;
-        suffix_ = value;
-        onChanged();
-        return this;
-      }
-      public final Builder setUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.setUnknownFields(unknownFields);
-      }
-
-      public final Builder mergeUnknownFields(
-          final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
-        return super.mergeUnknownFields(unknownFields);
-      }
-
-
-      // @@protoc_insertion_point(builder_scope:google.protobuf.compiler.Version)
-    }
-
-    // @@protoc_insertion_point(class_scope:google.protobuf.compiler.Version)
-    private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version DEFAULT_INSTANCE;
-    static {
-      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version();
-    }
-
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version getDefaultInstance() {
-      return DEFAULT_INSTANCE;
-    }
-
-    @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Version>
-        PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Version>() {
-      public Version parsePartialFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-          return new Version(input, extensionRegistry);
-      }
-    };
-
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Version> parser() {
-      return PARSER;
-    }
-
-    @java.lang.Override
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Version> getParserForType() {
-      return PARSER;
-    }
-
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version getDefaultInstanceForType() {
-      return DEFAULT_INSTANCE;
-    }
-
-  }
-
-  public interface CodeGeneratorRequestOrBuilder extends
-      // @@protoc_insertion_point(interface_extends:google.protobuf.compiler.CodeGeneratorRequest)
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    java.util.List<java.lang.String>
-        getFileToGenerateList();
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    int getFileToGenerateCount();
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    java.lang.String getFileToGenerate(int index);
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getFileToGenerateBytes(int index);
-
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    boolean hasParameter();
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    java.lang.String getParameter();
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getParameterBytes();
-
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> 
-        getProtoFileList();
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getProtoFile(int index);
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    int getProtoFileCount();
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
-        getProtoFileOrBuilderList();
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getProtoFileOrBuilder(
-        int index);
-
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    boolean hasCompilerVersion();
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version getCompilerVersion();
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.VersionOrBuilder getCompilerVersionOrBuilder();
-  }
-  /**
-   * <pre>
-   * An encoded CodeGeneratorRequest is written to the plugin's stdin.
-   * </pre>
-   *
-   * Protobuf type {@code google.protobuf.compiler.CodeGeneratorRequest}
-   */
-  public  static final class CodeGeneratorRequest extends
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
-      // @@protoc_insertion_point(message_implements:google.protobuf.compiler.CodeGeneratorRequest)
-      CodeGeneratorRequestOrBuilder {
-    // Use CodeGeneratorRequest.newBuilder() to construct.
-    private CodeGeneratorRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
-      super(builder);
-    }
-    private CodeGeneratorRequest() {
-      fileToGenerate_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
-      parameter_ = "";
-      protoFile_ = java.util.Collections.emptyList();
-    }
-
-    @java.lang.Override
-    public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
-      return this.unknownFields;
-    }
-    private CodeGeneratorRequest(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      this();
-      int mutable_bitField0_ = 0;
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                fileToGenerate_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList();
-                mutable_bitField0_ |= 0x00000001;
-              }
-              fileToGenerate_.add(bs);
-              break;
-            }
-            case 18: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
-              bitField0_ |= 0x00000001;
-              parameter_ = bs;
-              break;
-            }
-            case 26: {
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000002) == 0x00000002)) {
-                subBuilder = compilerVersion_.toBuilder();
-              }
-              compilerVersion_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.PARSER, extensionRegistry);
-              if (subBuilder != null) {
-                subBuilder.mergeFrom(compilerVersion_);
-                compilerVersion_ = subBuilder.buildPartial();
-              }
-              bitField0_ |= 0x00000002;
-              break;
-            }
-            case 122: {
-              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
-                protoFile_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>();
-                mutable_bitField0_ |= 0x00000004;
-              }
-              protoFile_.add(
-                  input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.PARSER, extensionRegistry));
-              break;
-            }
-          }
-        }
-      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
-            e).setUnfinishedMessage(this);
-      } finally {
-        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-          fileToGenerate_ = fileToGenerate_.getUnmodifiableView();
-        }
-        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
-          protoFile_ = java.util.Collections.unmodifiableList(protoFile_);
-        }
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_CodeGeneratorRequest_descriptor;
-    }
-
-    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_CodeGeneratorRequest_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.Builder.class);
-    }
-
-    private int bitField0_;
-    public static final int FILE_TO_GENERATE_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList fileToGenerate_;
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
-        getFileToGenerateList() {
-      return fileToGenerate_;
-    }
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    public int getFileToGenerateCount() {
-      return fileToGenerate_.size();
-    }
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    public java.lang.String getFileToGenerate(int index) {
-      return fileToGenerate_.get(index);
-    }
-    /**
-     * <pre>
-     * The .proto files that were explicitly listed on the command-line.  The
-     * code generator should generate code only for these files.  Each file's
-     * descriptor will be included in proto_file, below.
-     * </pre>
-     *
-     * <code>repeated string file_to_generate = 1;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getFileToGenerateBytes(int index) {
-      return fileToGenerate_.getByteString(index);
-    }
-
-    public static final int PARAMETER_FIELD_NUMBER = 2;
-    private volatile java.lang.Object parameter_;
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    public boolean hasParameter() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    public java.lang.String getParameter() {
-      java.lang.Object ref = parameter_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
-            (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          parameter_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <pre>
-     * The generator parameter passed on the command-line.
-     * </pre>
-     *
-     * <code>optional string parameter = 2;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-        getParameterBytes() {
-      java.lang.Object ref = parameter_;
-      if (ref instanceof java.lang.String) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        parameter_ = b;
-        return b;
-      } else {
-        return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    public static final int PROTO_FILE_FIELD_NUMBER = 15;
-    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> protoFile_;
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> getProtoFileList() {
-      return protoFile_;
-    }
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> 
-        getProtoFileOrBuilderList() {
-      return protoFile_;
-    }
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    public int getProtoFileCount() {
-      return protoFile_.size();
-    }
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getProtoFile(int index) {
-      return protoFile_.get(index);
-    }
-    /**
-     * <pre>
-     * FileDescriptorProtos for all files in files_to_generate and everything
-     * they import.  The files will appear in topological order, so each file
-     * appears before any file that imports it.
-     * protoc guarantees that all proto_files will be written after
-     * the fields above, even though this is not technically guaranteed by the
-     * protobuf wire format.  This theoretically could allow a plugin to stream
-     * in the FileDescriptorProtos and handle them one by one rather than read
-     * the entire set into memory at once.  However, as of this writing, this
-     * is not similarly optimized on protoc's end -- it will store all fields in
-     * memory at once before sending them to the plugin.
-     * </pre>
-     *
-     * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getProtoFileOrBuilder(
-        int index) {
-      return protoFile_.get(index);
-    }
-
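The generated comment above repeats the plugin contract: proto_file arrives in topological order, each file before anything that imports it. As a minimal, hedged sketch (the class and names below are illustrative, not part of this file), a consumer can exploit that ordering to resolve every dependency in a single pass:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class TopologicalIndex {
      // Index each FileDescriptorProto by name; because protoc emits files in
      // topological order, every dependency is already indexed when its
      // importer is visited.
      static Map<String, FileDescriptorProto> index(CodeGeneratorRequest request) {
        Map<String, FileDescriptorProto> byName = new HashMap<>();
        for (FileDescriptorProto file : request.getProtoFileList()) {
          for (String dependency : file.getDependencyList()) {
            if (!byName.containsKey(dependency)) {
              throw new IllegalStateException("out-of-order dependency: " + dependency);
            }
          }
          byName.put(file.getName(), file);
        }
        return byName;
      }
    }
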
-    public static final int COMPILER_VERSION_FIELD_NUMBER = 3;
-    private org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version compilerVersion_;
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    public boolean hasCompilerVersion() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version getCompilerVersion() {
-      return compilerVersion_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.getDefaultInstance() : compilerVersion_;
-    }
-    /**
-     * <pre>
-     * The version number of protocol compiler.
-     * </pre>
-     *
-     * <code>optional .google.protobuf.compiler.Version compiler_version = 3;</code>
-     */
-    public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.VersionOrBuilder getCompilerVersionOrBuilder() {
-      return compilerVersion_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version.getDefaultInstance() : compilerVersion_;
-    }
-
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized == 1) return true;
-      if (isInitialized == 0) return false;
-
-      for (int i = 0; i < getProtoFileCount(); i++) {
-        if (!getProtoFile(i).isInitialized()) {
-          memoizedIsInitialized = 0;
-          return false;
-        }
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
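isInitialized() above memoizes its verdict in a single byte: -1 means not yet computed, 0 known-false, 1 known-true. A hedged, generic sketch of the same idiom (class and helper are illustrative, not from this file):

    final class MemoizedValidity {
      // -1 = unknown, 0 = invalid, 1 = valid; mirrors memoizedIsInitialized.
      private byte memoized = -1;

      boolean isValid() {
        byte cached = memoized;
        if (cached == 1) return true;
        if (cached == 0) return false;
        boolean result = deepCheck();              // hypothetical expensive walk
        memoized = result ? (byte) 1 : (byte) 0;
        return result;
      }

      private boolean deepCheck() {
        return true;  // placeholder; real code would recurse into sub-messages
      }
    }
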
-    public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      for (int i = 0; i < fileToGenerate_.size(); i++) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, fileToGenerate_.getRaw(i));
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, parameter_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeMessage(3, getCompilerVersion());
-      }
-      for (int i = 0; i < protoFile_.size(); i++) {
-        output.writeMessage(15, protoFile_.get(i));
-      }
-      unknownFields.writeTo(output);
-    }
-
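writeTo() emits fields in ascending field-number order (1, 2, 3, then 15), matching the comment that proto_file is written after the other fields. A small round-trip sketch, with hypothetical field values, of the serialize/parse pair built on it:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class WireRoundTrip {
      public static void main(String[] args) throws Exception {
        CodeGeneratorRequest request = CodeGeneratorRequest.newBuilder()
            .addFileToGenerate("example.proto")   // hypothetical input
            .setParameter("key=value")            // hypothetical parameter
            .build();
        byte[] wire = request.toByteArray();      // drives writeTo() internally
        CodeGeneratorRequest reparsed = CodeGeneratorRequest.parseFrom(wire);
        System.out.println(reparsed.equals(request));  // prints: true
      }
    }
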
-    public int getSerializedSize() {
-      int size = memoizedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      {
-        int dataSize = 0;
-        for (int i = 0; i < fileToGenerate_.size(); i++) {
-          dataSize += computeStringSizeNoTag(fileToGenerate_.getRaw(i));
-        }
-        size += dataSize;
-        size += 1 * getFileToGenerateList().size();
-      }
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, parameter_);
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeMessageSize(3, getCompilerVersion());
-      }
-      for (int i = 0; i < protoFile_.size(); i++) {
-        size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
-          .computeMessageSize(15, protoFile_.get(i));
-      }
-      size += unknownFields.getSerializedSize();
-      memoizedSize = size;
-      return size;
-    }
-
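In getSerializedSize() above, the repeated-string term charges one tag byte per element (the "1 * list.size()" line), which is valid because field number 1 encodes in a single byte; the total is memoized in memoizedSize. A hedged consistency check:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class SizeCheck {
      // The memoized size must equal the actual encoded length.
      static boolean sizeMatchesEncoding(CodeGeneratorRequest request) {
        return request.getSerializedSize() == request.toByteArray().length;
      }
    }
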
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-        return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest) obj;
-
-      boolean result = true;
-      result = result && getFileToGenerateList()
-          .equals(other.getFileToGenerateList());
-      result = result && (hasParameter() == other.hasParameter());
-      if (hasParameter()) {
-        result = result && getParameter()
-            .equals(other.getParameter());
-      }
-      result = result && getProtoFileList()
-          .equals(other.getProtoFileList());
-      result = result && (hasCompilerVersion() == other.hasCompilerVersion());
-      if (hasCompilerVersion()) {
-        result = result && getCompilerVersion()
-            .equals(other.getCompilerVersion());
-      }
-      result = result && unknownFields.equals(other.unknownFields);
-      return result;
-    }
-
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptor().hashCode();
-      if (getFileToGenerateCount() > 0) {
-        hash = (37 * hash) + FILE_TO_GENERATE_FIELD_NUMBER;
-        hash = (53 * hash) + getFileToGenerateList().hashCode();
-      }
-      if (hasParameter()) {
-        hash = (37 * hash) + PARAMETER_FIELD_NUMBER;
-        hash = (53 * hash) + getParameter().hashCode();
-      }
-      if (getProtoFileCount() > 0) {
-        hash = (37 * hash) + PROTO_FILE_FIELD_NUMBER;
-        hash = (53 * hash) + getProtoFileList().hashCode();
-      }
-      if (hasCompilerVersion()) {
-        hash = (37 * hash) + COMPILER_VERSION_FIELD_NUMBER;
-        hash = (53 * hash) + getCompilerVersion().hashCode();
-      }
-      hash = (29 * hash) + unknownFields.hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
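equals() and hashCode() above implement value semantics: field-by-field comparison, and a memoized hash that mixes each set field's number (factor 37) and value (factor 53) into a descriptor-seeded accumulator. A brief hedged illustration:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class ValueSemanticsDemo {
      public static void main(String[] args) {
        CodeGeneratorRequest a = CodeGeneratorRequest.newBuilder().setParameter("p").build();
        CodeGeneratorRequest b = CodeGeneratorRequest.newBuilder().setParameter("p").build();
        // Independently built but field-equal: equal objects, equal hashes.
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode());  // true
      }
    }
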
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(byte[] data)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        byte[] data,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseDelimitedFrom(
-        java.io.InputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input);
-    }
-    public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-          .parseWithIOException(PARSER, input, extensionRegistry);
-    }
-
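The parseFrom() family above is what a protoc plugin uses to read its request: protoc writes one encoded CodeGeneratorRequest to the plugin's stdin and expects one CodeGeneratorResponse on stdout. A minimal, hedged plugin skeleton (output name and content are hypothetical):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse;

    final class MinimalPlugin {
      public static void main(String[] args) throws Exception {
        // One request per invocation, unframed, on stdin.
        CodeGeneratorRequest request = CodeGeneratorRequest.parseFrom(System.in);
        CodeGeneratorResponse.newBuilder()
            .addFile(CodeGeneratorResponse.File.newBuilder()
                .setName("listing.txt")                                   // hypothetical
                .setContent("to generate: " + request.getFileToGenerateList()))
            .build()
            .writeTo(System.out);   // protoc reads the response from stdout
      }
    }
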
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder() {
-      return DEFAULT_INSTANCE.toBuilder();
-    }
-    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest prototype) {
-      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() {
-      return this == DEFAULT_INSTANCE
-          ? new Builder() : new Builder().mergeFrom(this);
-    }
-
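newBuilder()/toBuilder() above give the usual copy-on-build pattern: messages are immutable, and toBuilder() seeds a fresh Builder from an existing instance. A hedged sketch:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class CopyOnBuild {
      static CodeGeneratorRequest withParameter(CodeGeneratorRequest original) {
        // original is never mutated; the builder holds an independent copy.
        return original.toBuilder()
            .setParameter("opt=1")   // hypothetical parameter
            .build();
      }
    }
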
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * <pre>
-     * An encoded CodeGeneratorRequest is written to the plugin's stdin.
-     * </pre>
-     *
-     * Protobuf type {@code google.protobuf.compiler.CodeGeneratorRequest}
-     */
-    public static final class Builder extends
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
-        // @@protoc_insertion_point(builder_implements:google.protobuf.compiler.CodeGeneratorRequest)
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequestOrBuilder {
-      public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_CodeGeneratorRequest_descriptor;
-      }
-
-      protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_CodeGeneratorRequest_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
-                .alwaysUseFieldBuilders) {
-          getProtoFileFieldBuilder();
-          getCompilerVersionFieldBuilder();
-        }
-      }
-      public Builder clear() {
-        super.clear();
-        fileToGenerate_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        parameter_ = "";
-        bitField0_ = (bitField0_ & ~0x00000002);
-        if (protoFileBuilder_ == null) {
-          protoFile_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000004);
-        } else {
-          protoFileBuilder_.clear();
-        }
-        if (compilerVersionBuilder_ == null) {
-          compilerVersion_ = null;
-        } else {
-          compilerVersionBuilder_.clear();
-        }
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.internal_static_google_protobuf_compiler_CodeGeneratorRequest_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest build() {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest buildPartial() {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((bitField0_ & 0x00000001) == 0x00000001)) {
-          fileToGenerate_ = fileToGenerate_.getUnmodifiableView();
-          bitField0_ = (bitField0_ & ~0x00000001);
-        }
-        result.fileToGenerate_ = fileToGenerate_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.parameter_ = parameter_;
-        if (protoFileBuilder_ == null) {
-          if (((bitField0_ & 0x00000004) == 0x00000004)) {
-            protoFile_ = java.util.Collections.unmodifiableList(protoFile_);
-            bitField0_ = (bitField0_ & ~0x00000004);
-          }
-          result.protoFile_ = protoFile_;
-        } else {
-          result.protoFile_ = protoFileBuilder_.build();
-        }
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        if (compilerVersionBuilder_ == null) {
-          result.compilerVersion_ = compilerVersion_;
-        } else {
-          result.compilerVersion_ = compilerVersionBuilder_.build();
-        }
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder clone() {
-        return (Builder) super.clone();
-      }
-      public Builder setField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.setField(field, value);
-      }
-      public Builder clearField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
-        return (Builder) super.clearField(field);
-      }
-      public Builder clearOneof(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
-        return (Builder) super.clearOneof(oneof);
-      }
-      public Builder setRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          int index, Object value) {
-        return (Builder) super.setRepeatedField(field, index, value);
-      }
-      public Builder addRepeatedField(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
-          Object value) {
-        return (Builder) super.addRepeatedField(field, value);
-      }
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest) {
-          return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest other) {
-        if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest.getDefaultInstance()) return this;
-        if (!other.fileToGenerate_.isEmpty()) {
-          if (fileToGenerate_.isEmpty()) {
-            fileToGenerate_ = other.fileToGenerate_;
-            bitField0_ = (bitField0_ & ~0x00000001);
-          } else {
-            ensureFileToGenerateIsMutable();
-            fileToGenerate_.addAll(other.fileToGenerate_);
-          }
-          onChanged();
-        }
-        if (other.hasParameter()) {
-          bitField0_ |= 0x00000002;
-          parameter_ = other.parameter_;
-          onChanged();
-        }
-        if (protoFileBuilder_ == null) {
-          if (!other.protoFile_.isEmpty()) {
-            if (protoFile_.isEmpty()) {
-              protoFile_ = other.protoFile_;
-              bitField0_ = (bitField0_ & ~0x00000004);
-            } else {
-              ensureProtoFileIsMutable();
-              protoFile_.addAll(other.protoFile_);
-            }
-            onChanged();
-          }
-        } else {
-          if (!other.protoFile_.isEmpty()) {
-            if (protoFileBuilder_.isEmpty()) {
-              protoFileBuilder_.dispose();
-              protoFileBuilder_ = null;
-              protoFile_ = other.protoFile_;
-              bitField0_ = (bitField0_ & ~0x00000004);
-              protoFileBuilder_ = 
-                org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
-                   getProtoFileFieldBuilder() : null;
-            } else {
-              protoFileBuilder_.addAllMessages(other.protoFile_);
-            }
-          }
-        }
-        if (other.hasCompilerVersion()) {
-          mergeCompilerVersion(other.getCompilerVersion());
-        }
-        this.mergeUnknownFields(other.unknownFields);
-        onChanged();
-        return this;
-      }
-
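mergeFrom() above follows the standard protobuf merge rules: repeated fields (file_to_generate, proto_file) are concatenated, while set singular fields (parameter, compiler_version) overwrite the target. For illustration, under hypothetical inputs:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class MergeDemo {
      public static void main(String[] args) {
        CodeGeneratorRequest first = CodeGeneratorRequest.newBuilder()
            .addFileToGenerate("a.proto").setParameter("old").build();
        CodeGeneratorRequest second = CodeGeneratorRequest.newBuilder()
            .addFileToGenerate("b.proto").setParameter("new").build();
        CodeGeneratorRequest merged = first.toBuilder().mergeFrom(second).build();
        // file_to_generate: [a.proto, b.proto]; parameter: "new"
        System.out.println(merged.getFileToGenerateList() + " " + merged.getParameter());
      }
    }
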
-      public final boolean isInitialized() {
-        for (int i = 0; i < getProtoFileCount(); i++) {
-          if (!getProtoFile(i).isInitialized()) {
-            return false;
-          }
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest) e.getUnfinishedMessage();
-          throw e.unwrapIOException();
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList fileToGenerate_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
-      private void ensureFileToGenerateIsMutable() {
-        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          fileToGenerate_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(fileToGenerate_);
-          bitField0_ |= 0x00000001;
-         }
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
-          getFileToGenerateList() {
-        return fileToGenerate_.getUnmodifiableView();
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public int getFileToGenerateCount() {
-        return fileToGenerate_.size();
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public java.lang.String getFileToGenerate(int index) {
-        return fileToGenerate_.get(index);
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getFileToGenerateBytes(int index) {
-        return fileToGenerate_.getByteString(index);
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public Builder setFileToGenerate(
-          int index, java.lang.String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        ensureFileToGenerateIsMutable();
-        fileToGenerate_.set(index, value);
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public Builder addFileToGenerate(
-          java.lang.String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        ensureFileToGenerateIsMutable();
-        fileToGenerate_.add(value);
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public Builder addAllFileToGenerate(
-          java.lang.Iterable<java.lang.String> values) {
-        ensureFileToGenerateIsMutable();
-        org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
-            values, fileToGenerate_);
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public Builder clearFileToGenerate() {
-        fileToGenerate_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000001);
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The .proto files that were explicitly listed on the command-line.  The
-       * code generator should generate code only for these files.  Each file's
-       * descriptor will be included in proto_file, below.
-       * </pre>
-       *
-       * <code>repeated string file_to_generate = 1;</code>
-       */
-      public Builder addFileToGenerateBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        ensureFileToGenerateIsMutable();
-        fileToGenerate_.add(value);
-        onChanged();
-        return this;
-      }
-
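The accessors above guard mutation with ensureFileToGenerateIsMutable(), copying the LazyStringArrayList the first time the builder writes to it. Typical hedged usage of the add/addAll pair:

    import java.util.Arrays;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class FileListDemo {
      public static void main(String[] args) {
        CodeGeneratorRequest.Builder builder = CodeGeneratorRequest.newBuilder();
        builder.addFileToGenerate("a.proto");                                // hypothetical
        builder.addAllFileToGenerate(Arrays.asList("b.proto", "c.proto"));   // hypothetical
        // getFileToGenerateList() returns an unmodifiable view of the list.
        System.out.println(builder.getFileToGenerateList());  // [a.proto, b.proto, c.proto]
      }
    }
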
-      private java.lang.Object parameter_ = "";
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public boolean hasParameter() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public java.lang.String getParameter() {
-        java.lang.Object ref = parameter_;
-        if (!(ref instanceof java.lang.String)) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
-              (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-          java.lang.String s = bs.toStringUtf8();
-          if (bs.isValidUtf8()) {
-            parameter_ = s;
-          }
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
-          getParameterBytes() {
-        java.lang.Object ref = parameter_;
-        if (ref instanceof String) {
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
-              org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          parameter_ = b;
-          return b;
-        } else {
-          return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public Builder setParameter(
-          java.lang.String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        parameter_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public Builder clearParameter() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        parameter_ = getDefaultInstance().getParameter();
-        onChanged();
-        return this;
-      }
-      /**
-       * <pre>
-       * The generator parameter passed on the command-line.
-       * </pre>
-       *
-       * <code>optional string parameter = 2;</code>
-       */
-      public Builder setParameterBytes(
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        parameter_ = value;
-        onChanged();
-        return this;
-      }
-
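getParameter()/getParameterBytes() above store the field as either a String or a ByteString and convert lazily, caching the converted form in place (the decoded String only when it is valid UTF-8). A hedged sketch of both directions:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest;

    final class LazyParameterDemo {
      public static void main(String[] args) {
        CodeGeneratorRequest.Builder builder = CodeGeneratorRequest.newBuilder();
        builder.setParameterBytes(ByteString.copyFromUtf8("k=v"));
        // First read decodes the ByteString; a valid UTF-8 result is cached.
        String decoded = builder.getParameter();          // "k=v"
        // The reverse direction caches the encoded ByteString the same way.
        ByteString raw = builder.getParameterBytes();
        System.out.println(decoded.equals(raw.toStringUtf8()));  // true
      }
    }
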
-      private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto> protoFile_ =
-        java.util.Collections.emptyList();
-      private void ensureProtoFileIsMutable() {
-        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
-          protoFile_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto>(protoFile_);
-          bitField0_ |= 0x00000004;
-         }
-      }
-
-      private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
-          org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> protoFileBuilder_;
-
-      /**
-       * <pre>
-       * FileDescriptorProtos for all files in files_to_generate and everything
-       * they import.  The files will appear in topological order, so each file
-       * appears before any file that imports it.
-       * protoc guarantees that all proto_files will be written after
-       * the fields above, even though this is not technically guaranteed by the
-       * protobuf wire format.  This theoretically could allow a plugin to stream
-       * in the FileDescriptorProtos and handle them one by one rather than read
-       * the entire set into memory at once.  However, as of this writing, this
-       * is not similarly optimized on protoc's end -- it will store all fields in
-       * memory at once before sending them to the plugin.
-       * </pre>
-       *
-       * <code>repeated .google.protobuf.FileDescriptorProto proto_file = 15;</code>

<TRUNCATED>