Posted to commits@hbase.apache.org by st...@apache.org on 2016/09/30 00:30:40 UTC

[11/29] hbase git commit: HBASE-16567 Upgrade to protobuf-3.1.x. Regenerate all protos in this module with protoc3. Redo ByteStringer to use the new pb3.1.0 unsafebytesutil instead of HBaseZeroCopyByteString.
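
For readers skimming this regeneration diff, the ByteStringer note in the commit message is the only hand-written change it alludes to: protobuf 3.1.0 exposes UnsafeByteOperations for wrapping a byte[] as a ByteString without copying, which removes the need for the HBaseZeroCopyByteString workaround that lived inside com.google.protobuf's package to reach package-private constructors. The snippet below is a minimal sketch of such a helper under that assumption; the class and method layout is illustrative and is not a copy of the actual HBase ByteStringer source.

    import com.google.protobuf.ByteString;
    import com.google.protobuf.UnsafeByteOperations;

    // Minimal sketch of a ByteStringer-style helper, assuming the pb 3.1.0
    // UnsafeByteOperations API. Not the actual HBase implementation.
    public final class ByteStringerSketch {
      private ByteStringerSketch() {}

      // Wrap an existing byte[] as a ByteString without copying it.
      // Before pb 3.1.0 this required HBaseZeroCopyByteString, a subclass
      // planted in com.google.protobuf's package space.
      public static ByteString wrap(final byte[] array) {
        return UnsafeByteOperations.unsafeWrap(array);
      }

      // Wrap a slice of a byte[] without copying.
      public static ByteString wrap(final byte[] array, final int offset, final int length) {
        return UnsafeByteOperations.unsafeWrap(array, offset, length);
      }
    }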

http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
index eeabfb0..4426013 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class MasterProtos {
   private MasterProtos() {}
   public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
       com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
    * Protobuf enum {@code hbase.pb.MasterSwitchType}
@@ -16,11 +22,11 @@ public final class MasterProtos {
     /**
      * <code>SPLIT = 0;</code>
      */
-    SPLIT(0, 0),
+    SPLIT(0),
     /**
      * <code>MERGE = 1;</code>
      */
-    MERGE(1, 1),
+    MERGE(1),
     ;
 
     /**
@@ -33,9 +39,19 @@ public final class MasterProtos {
     public static final int MERGE_VALUE = 1;
 
 
-    public final int getNumber() { return value; }
+    public final int getNumber() {
+      return value;
+    }
 
+    /**
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
     public static MasterSwitchType valueOf(int value) {
+      return forNumber(value);
+    }
+
+    public static MasterSwitchType forNumber(int value) {
       switch (value) {
         case 0: return SPLIT;
         case 1: return MERGE;
@@ -47,17 +63,17 @@ public final class MasterProtos {
         internalGetValueMap() {
       return internalValueMap;
     }
-    private static com.google.protobuf.Internal.EnumLiteMap<MasterSwitchType>
-        internalValueMap =
+    private static final com.google.protobuf.Internal.EnumLiteMap<
+        MasterSwitchType> internalValueMap =
           new com.google.protobuf.Internal.EnumLiteMap<MasterSwitchType>() {
             public MasterSwitchType findValueByNumber(int number) {
-              return MasterSwitchType.valueOf(number);
+              return MasterSwitchType.forNumber(number);
             }
           };
 
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
+      return getDescriptor().getValues().get(ordinal());
     }
     public final com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptorForType() {
@@ -79,21 +95,19 @@ public final class MasterProtos {
       return VALUES[desc.getIndex()];
     }
 
-    private final int index;
     private final int value;
 
-    private MasterSwitchType(int index, int value) {
-      this.index = index;
+    private MasterSwitchType(int value) {
       this.value = value;
     }
 
     // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
   }
 
-  public interface AddColumnRequestOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface AddColumnRequestOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnRequest)
+      com.google.protobuf.MessageOrBuilder {
 
-    // required .hbase.pb.TableName table_name = 1;
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
@@ -107,7 +121,6 @@ public final class MasterProtos {
      */
     org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
 
-    // required .hbase.pb.ColumnFamilySchema column_families = 2;
     /**
      * <code>required .hbase.pb.ColumnFamilySchema column_families = 2;</code>
      */
@@ -121,7 +134,6 @@ public final class MasterProtos {
      */
     org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder();
 
-    // optional uint64 nonce_group = 3 [default = 0];
     /**
      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
      */
@@ -131,7 +143,6 @@ public final class MasterProtos {
      */
     long getNonceGroup();
 
-    // optional uint64 nonce = 4 [default = 0];
     /**
      * <code>optional uint64 nonce = 4 [default = 0];</code>
      */
@@ -144,36 +155,29 @@ public final class MasterProtos {
   /**
    * Protobuf type {@code hbase.pb.AddColumnRequest}
    */
-  public static final class AddColumnRequest extends
-      com.google.protobuf.GeneratedMessage
-      implements AddColumnRequestOrBuilder {
+  public  static final class AddColumnRequest extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.AddColumnRequest)
+      AddColumnRequestOrBuilder {
     // Use AddColumnRequest.newBuilder() to construct.
-    private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private AddColumnRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final AddColumnRequest defaultInstance;
-    public static AddColumnRequest getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public AddColumnRequest getDefaultInstanceForType() {
-      return defaultInstance;
+    private AddColumnRequest() {
+      nonceGroup_ = 0L;
+      nonce_ = 0L;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private AddColumnRequest(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -234,7 +238,7 @@ public final class MasterProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -245,30 +249,14 @@ public final class MasterProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<AddColumnRequest> PARSER =
-        new com.google.protobuf.AbstractParser<AddColumnRequest>() {
-      public AddColumnRequest parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new AddColumnRequest(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<AddColumnRequest> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // required .hbase.pb.TableName table_name = 1;
     public static final int TABLE_NAME_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
     /**
@@ -281,16 +269,15 @@ public final class MasterProtos {
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
 
-    // required .hbase.pb.ColumnFamilySchema column_families = 2;
     public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2;
     private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_;
     /**
@@ -303,16 +290,15 @@ public final class MasterProtos {
      * <code>required .hbase.pb.ColumnFamilySchema column_families = 2;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
-      return columnFamilies_;
+      return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_;
     }
     /**
      * <code>required .hbase.pb.ColumnFamilySchema column_families = 2;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
-      return columnFamilies_;
+      return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_;
     }
 
-    // optional uint64 nonce_group = 3 [default = 0];
     public static final int NONCE_GROUP_FIELD_NUMBER = 3;
     private long nonceGroup_;
     /**
@@ -328,7 +314,6 @@ public final class MasterProtos {
       return nonceGroup_;
     }
 
-    // optional uint64 nonce = 4 [default = 0];
     public static final int NONCE_FIELD_NUMBER = 4;
     private long nonce_;
     /**
@@ -344,16 +329,11 @@ public final class MasterProtos {
       return nonce_;
     }
 
-    private void initFields() {
-      tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
-      nonceGroup_ = 0L;
-      nonce_ = 0L;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (!hasTableName()) {
         memoizedIsInitialized = 0;
@@ -377,12 +357,11 @@ public final class MasterProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeMessage(1, tableName_);
+        output.writeMessage(1, getTableName());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeMessage(2, columnFamilies_);
+        output.writeMessage(2, getColumnFamilies());
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         output.writeUInt64(3, nonceGroup_);
@@ -390,22 +369,21 @@ public final class MasterProtos {
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
         output.writeUInt64(4, nonce_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, tableName_);
+          .computeMessageSize(1, getTableName());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(2, columnFamilies_);
+          .computeMessageSize(2, getColumnFamilies());
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         size += com.google.protobuf.CodedOutputStream
@@ -415,19 +393,13 @@ public final class MasterProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt64Size(4, nonce_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -458,12 +430,10 @@ public final class MasterProtos {
         result = result && (getNonce()
             == other.getNonce());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -481,13 +451,15 @@ public final class MasterProtos {
       }
       if (hasNonceGroup()) {
         hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonceGroup());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonceGroup());
       }
       if (hasNonce()) {
         hash = (37 * hash) + NONCE_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonce());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonce());
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -515,46 +487,57 @@ public final class MasterProtos {
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
@@ -562,14 +545,15 @@ public final class MasterProtos {
      * Protobuf type {@code hbase.pb.AddColumnRequest}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.AddColumnRequest)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -582,30 +566,27 @@ public final class MasterProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
           getTableNameFieldBuilder();
           getColumnFamiliesFieldBuilder();
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
         } else {
           tableNameBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000001);
         if (columnFamiliesBuilder_ == null) {
-          columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+          columnFamilies_ = null;
         } else {
           columnFamiliesBuilder_.clear();
         }
@@ -617,10 +598,6 @@ public final class MasterProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnRequest_descriptor;
@@ -671,6 +648,32 @@ public final class MasterProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest) {
           return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest)other);
@@ -694,25 +697,22 @@ public final class MasterProtos {
         if (other.hasNonce()) {
           setNonce(other.getNonce());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasTableName()) {
-          
           return false;
         }
         if (!hasColumnFamilies()) {
-          
           return false;
         }
         if (!getTableName().isInitialized()) {
-          
           return false;
         }
         if (!getColumnFamilies().isInitialized()) {
-          
           return false;
         }
         return true;
@@ -727,7 +727,7 @@ public final class MasterProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -737,9 +737,8 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      // required .hbase.pb.TableName table_name = 1;
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
@@ -752,7 +751,7 @@ public final class MasterProtos {
        */
       public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
         if (tableNameBuilder_ == null) {
-          return tableName_;
+          return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         } else {
           return tableNameBuilder_.getMessage();
         }
@@ -793,6 +792,7 @@ public final class MasterProtos {
       public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
         if (tableNameBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              tableName_ != null &&
               tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
             tableName_ =
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
@@ -811,7 +811,7 @@ public final class MasterProtos {
        */
       public Builder clearTableName() {
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
           onChanged();
         } else {
           tableNameBuilder_.clear();
@@ -834,19 +834,20 @@ public final class MasterProtos {
         if (tableNameBuilder_ != null) {
           return tableNameBuilder_.getMessageOrBuilder();
         } else {
-          return tableName_;
+          return tableName_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         }
       }
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
        */
-      private com.google.protobuf.SingleFieldBuilder<
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
           getTableNameFieldBuilder() {
         if (tableNameBuilder_ == null) {
-          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
-                  tableName_,
+                  getTableName(),
                   getParentForChildren(),
                   isClean());
           tableName_ = null;
@@ -854,9 +855,8 @@ public final class MasterProtos {
         return tableNameBuilder_;
       }
 
-      // required .hbase.pb.ColumnFamilySchema column_families = 2;
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = null;
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_;
       /**
        * <code>required .hbase.pb.ColumnFamilySchema column_families = 2;</code>
@@ -869,7 +869,7 @@ public final class MasterProtos {
        */
       public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
         if (columnFamiliesBuilder_ == null) {
-          return columnFamilies_;
+          return columnFamilies_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_;
         } else {
           return columnFamiliesBuilder_.getMessage();
         }
@@ -910,6 +910,7 @@ public final class MasterProtos {
       public Builder mergeColumnFamilies(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
         if (columnFamiliesBuilder_ == null) {
           if (((bitField0_ & 0x00000002) == 0x00000002) &&
+              columnFamilies_ != null &&
               columnFamilies_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) {
             columnFamilies_ =
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial();
@@ -928,7 +929,7 @@ public final class MasterProtos {
        */
       public Builder clearColumnFamilies() {
         if (columnFamiliesBuilder_ == null) {
-          columnFamilies_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+          columnFamilies_ = null;
           onChanged();
         } else {
           columnFamiliesBuilder_.clear();
@@ -951,19 +952,20 @@ public final class MasterProtos {
         if (columnFamiliesBuilder_ != null) {
           return columnFamiliesBuilder_.getMessageOrBuilder();
         } else {
-          return columnFamilies_;
+          return columnFamilies_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance() : columnFamilies_;
         }
       }
       /**
        * <code>required .hbase.pb.ColumnFamilySchema column_families = 2;</code>
        */
-      private com.google.protobuf.SingleFieldBuilder<
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> 
           getColumnFamiliesFieldBuilder() {
         if (columnFamiliesBuilder_ == null) {
-          columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+          columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
-                  columnFamilies_,
+                  getColumnFamilies(),
                   getParentForChildren(),
                   isClean());
           columnFamilies_ = null;
@@ -971,7 +973,6 @@ public final class MasterProtos {
         return columnFamiliesBuilder_;
       }
 
-      // optional uint64 nonce_group = 3 [default = 0];
       private long nonceGroup_ ;
       /**
        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
@@ -1004,7 +1005,6 @@ public final class MasterProtos {
         return this;
       }
 
-      // optional uint64 nonce = 4 [default = 0];
       private long nonce_ ;
       /**
        * <code>optional uint64 nonce = 4 [default = 0];</code>
@@ -1036,22 +1036,59 @@ public final class MasterProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.AddColumnRequest)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnRequest)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest DEFAULT_INSTANCE;
     static {
-      defaultInstance = new AddColumnRequest(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<AddColumnRequest>
+        PARSER = new com.google.protobuf.AbstractParser<AddColumnRequest>() {
+      public AddColumnRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new AddColumnRequest(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<AddColumnRequest> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddColumnRequest> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnRequest)
   }
 
-  public interface AddColumnResponseOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface AddColumnResponseOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.AddColumnResponse)
+      com.google.protobuf.MessageOrBuilder {
 
-    // optional uint64 proc_id = 1;
     /**
      * <code>optional uint64 proc_id = 1;</code>
      */
@@ -1064,36 +1101,28 @@ public final class MasterProtos {
   /**
    * Protobuf type {@code hbase.pb.AddColumnResponse}
    */
-  public static final class AddColumnResponse extends
-      com.google.protobuf.GeneratedMessage
-      implements AddColumnResponseOrBuilder {
+  public  static final class AddColumnResponse extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.AddColumnResponse)
+      AddColumnResponseOrBuilder {
     // Use AddColumnResponse.newBuilder() to construct.
-    private AddColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private AddColumnResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private AddColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final AddColumnResponse defaultInstance;
-    public static AddColumnResponse getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public AddColumnResponse getDefaultInstanceForType() {
-      return defaultInstance;
+    private AddColumnResponse() {
+      procId_ = 0L;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private AddColumnResponse(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -1123,7 +1152,7 @@ public final class MasterProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -1134,30 +1163,14 @@ public final class MasterProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<AddColumnResponse> PARSER =
-        new com.google.protobuf.AbstractParser<AddColumnResponse>() {
-      public AddColumnResponse parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new AddColumnResponse(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<AddColumnResponse> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // optional uint64 proc_id = 1;
     public static final int PROC_ID_FIELD_NUMBER = 1;
     private long procId_;
     /**
@@ -1173,13 +1186,11 @@ public final class MasterProtos {
       return procId_;
     }
 
-    private void initFields() {
-      procId_ = 0L;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       memoizedIsInitialized = 1;
       return true;
@@ -1187,16 +1198,14 @@ public final class MasterProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         output.writeUInt64(1, procId_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
@@ -1204,19 +1213,13 @@ public final class MasterProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt64Size(1, procId_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -1232,12 +1235,10 @@ public final class MasterProtos {
         result = result && (getProcId()
             == other.getProcId());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -1247,9 +1248,10 @@ public final class MasterProtos {
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasProcId()) {
         hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getProcId());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getProcId());
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -1277,46 +1279,57 @@ public final class MasterProtos {
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
@@ -1324,14 +1337,15 @@ public final class MasterProtos {
      * Protobuf type {@code hbase.pb.AddColumnResponse}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.AddColumnResponse)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -1344,18 +1358,15 @@ public final class MasterProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         procId_ = 0L;
@@ -1363,10 +1374,6 @@ public final class MasterProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_AddColumnResponse_descriptor;
@@ -1397,6 +1404,32 @@ public final class MasterProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse) {
           return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse)other);
@@ -1411,7 +1444,8 @@ public final class MasterProtos {
         if (other.hasProcId()) {
           setProcId(other.getProcId());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
@@ -1428,7 +1462,7 @@ public final class MasterProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -1438,7 +1472,6 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      // optional uint64 proc_id = 1;
       private long procId_ ;
       /**
        * <code>optional uint64 proc_id = 1;</code>
@@ -1470,22 +1503,59 @@ public final class MasterProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.AddColumnResponse)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnResponse)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse DEFAULT_INSTANCE;
     static {
-      defaultInstance = new AddColumnResponse(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<AddColumnResponse>
+        PARSER = new com.google.protobuf.AbstractParser<AddColumnResponse>() {
+      public AddColumnResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new AddColumnResponse(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<AddColumnResponse> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<AddColumnResponse> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.AddColumnResponse)
   }
 
-  public interface DeleteColumnRequestOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface DeleteColumnRequestOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteColumnRequest)
+      com.google.protobuf.MessageOrBuilder {
 
-    // required .hbase.pb.TableName table_name = 1;
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
@@ -1499,7 +1569,6 @@ public final class MasterProtos {
      */
     org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
 
-    // required bytes column_name = 2;
     /**
      * <code>required bytes column_name = 2;</code>
      */
@@ -1509,7 +1578,6 @@ public final class MasterProtos {
      */
     com.google.protobuf.ByteString getColumnName();
 
-    // optional uint64 nonce_group = 3 [default = 0];
     /**
      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
      */
@@ -1519,7 +1587,6 @@ public final class MasterProtos {
      */
     long getNonceGroup();
 
-    // optional uint64 nonce = 4 [default = 0];
     /**
      * <code>optional uint64 nonce = 4 [default = 0];</code>
      */
@@ -1532,36 +1599,30 @@ public final class MasterProtos {
   /**
    * Protobuf type {@code hbase.pb.DeleteColumnRequest}
    */
-  public static final class DeleteColumnRequest extends
-      com.google.protobuf.GeneratedMessage
-      implements DeleteColumnRequestOrBuilder {
+  public  static final class DeleteColumnRequest extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.DeleteColumnRequest)
+      DeleteColumnRequestOrBuilder {
     // Use DeleteColumnRequest.newBuilder() to construct.
-    private DeleteColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private DeleteColumnRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private DeleteColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final DeleteColumnRequest defaultInstance;
-    public static DeleteColumnRequest getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public DeleteColumnRequest getDefaultInstanceForType() {
-      return defaultInstance;
+    private DeleteColumnRequest() {
+      columnName_ = com.google.protobuf.ByteString.EMPTY;
+      nonceGroup_ = 0L;
+      nonce_ = 0L;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private DeleteColumnRequest(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -1614,7 +1675,7 @@ public final class MasterProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -1625,30 +1686,14 @@ public final class MasterProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<DeleteColumnRequest> PARSER =
-        new com.google.protobuf.AbstractParser<DeleteColumnRequest>() {
-      public DeleteColumnRequest parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new DeleteColumnRequest(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<DeleteColumnRequest> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // required .hbase.pb.TableName table_name = 1;
     public static final int TABLE_NAME_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
     /**
@@ -1661,16 +1706,15 @@ public final class MasterProtos {
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
     /**
      * <code>required .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
 
-    // required bytes column_name = 2;
     public static final int COLUMN_NAME_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString columnName_;
     /**
@@ -1686,7 +1730,6 @@ public final class MasterProtos {
       return columnName_;
     }
 
-    // optional uint64 nonce_group = 3 [default = 0];
     public static final int NONCE_GROUP_FIELD_NUMBER = 3;
     private long nonceGroup_;
     /**
@@ -1702,7 +1745,6 @@ public final class MasterProtos {
       return nonceGroup_;
     }
 
-    // optional uint64 nonce = 4 [default = 0];
     public static final int NONCE_FIELD_NUMBER = 4;
     private long nonce_;
     /**
@@ -1718,16 +1760,11 @@ public final class MasterProtos {
       return nonce_;
     }
 
-    private void initFields() {
-      tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      columnName_ = com.google.protobuf.ByteString.EMPTY;
-      nonceGroup_ = 0L;
-      nonce_ = 0L;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (!hasTableName()) {
         memoizedIsInitialized = 0;
@@ -1747,9 +1784,8 @@ public final class MasterProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeMessage(1, tableName_);
+        output.writeMessage(1, getTableName());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         output.writeBytes(2, columnName_);
@@ -1760,18 +1796,17 @@ public final class MasterProtos {
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
         output.writeUInt64(4, nonce_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, tableName_);
+          .computeMessageSize(1, getTableName());
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         size += com.google.protobuf.CodedOutputStream
@@ -1785,19 +1820,13 @@ public final class MasterProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt64Size(4, nonce_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -1828,12 +1857,10 @@ public final class MasterProtos {
         result = result && (getNonce()
             == other.getNonce());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -1851,13 +1878,15 @@ public final class MasterProtos {
       }
       if (hasNonceGroup()) {
         hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonceGroup());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonceGroup());
       }
       if (hasNonce()) {
         hash = (37 * hash) + NONCE_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonce());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonce());
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -1885,46 +1914,57 @@ public final class MasterProtos {
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
@@ -1932,14 +1972,15 @@ public final class MasterProtos {
      * Protobuf type {@code hbase.pb.DeleteColumnRequest}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.DeleteColumnRequest)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -1952,23 +1993,20 @@ public final class MasterProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
           getTableNameFieldBuilder();
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
         } else {
           tableNameBuilder_.clear();
         }
@@ -1982,10 +2020,6 @@ public final class MasterProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnRequest_descriptor;
@@ -2032,6 +2066,32 @@ public final class MasterProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest) {
           return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest)other);
@@ -2055,21 +2115,19 @@ public final class MasterProtos {
         if (other.hasNonce()) {
           setNonce(other.getNonce());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasTableName()) {
-          
           return false;
         }
         if (!hasColumnName()) {
-          
           return false;
         }
         if (!getTableName().isInitialized()) {
-          
           return false;
         }
         return true;
@@ -2084,7 +2142,7 @@ public final class MasterProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -2094,9 +2152,8 @@ public final class MasterProtos {
       }
       private int bitField0_;
 
-      // required .hbase.pb.TableName table_name = 1;
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
@@ -2109,7 +2166,7 @@ public final class MasterProtos {
        */
       public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
         if (tableNameBuilder_ == null) {
-          return tableName_;
+          return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         } else {
           return tableNameBuilder_.getMessage();
         }
@@ -2150,6 +2207,7 @@ public final class MasterProtos {
       public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
         if (tableNameBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              tableName_ != null &&
               tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
             tableName_ =
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
@@ -2168,7 +2226,7 @@ public final class MasterProtos {
        */
       public Builder clearTableName() {
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
           onChanged();
         } else {
           tableNameBuilder_.clear();
@@ -2191,19 +2249,20 @@ public final class MasterProtos {
         if (tableNameBuilder_ != null) {
           return tableNameBuilder_.getMessageOrBuilder();
         } else {
-          return tableName_;
+          return tableName_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         }
       }
       /**
        * <code>required .hbase.pb.TableName table_name = 1;</code>
        */
-      private com.google.protobuf.SingleFieldBuilder<
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> 
           getTableNameFieldBuilder() {
         if (tableNameBuilder_ == null) {
-          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
-                  tableName_,
+                  getTableName(),
                   getParentForChildren(),
                   isClean());
           tableName_ = null;
@@ -2211,7 +2270,6 @@ public final class MasterProtos {
         return tableNameBuilder_;
       }
 
-      // required bytes column_name = 2;
       private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>required bytes column_name = 2;</code>
@@ -2247,7 +2305,6 @@ public final class MasterProtos {
         return this;
       }
 
-      // optional uint64 nonce_group = 3 [default = 0];
       private long nonceGroup_ ;
       /**
        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
@@ -2280,7 +2337,6 @@ public final class MasterProtos {
         return this;
       }
 
-      // optional uint64 nonce = 4 [default = 0];
       private long nonce_ ;
       /**
        * <code>optional uint64 nonce = 4 [default = 0];</code>
@@ -2312,22 +2368,59 @@ public final class MasterProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.DeleteColumnRequest)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnRequest)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest DEFAULT_INSTANCE;
     static {
-      defaultInstance = new DeleteColumnRequest(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<DeleteColumnRequest>
+        PARSER = new com.google.protobuf.AbstractParser<DeleteColumnRequest>() {
+      public DeleteColumnRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new DeleteColumnRequest(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<DeleteColumnRequest> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<DeleteColumnRequest> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.DeleteColumnRequest)
   }
 
-  public interface DeleteColumnResponseOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface DeleteColumnResponseOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.DeleteColumnResponse)
+      com.google.protobuf.MessageOrBuilder {
 
-    // optional uint64 proc_id = 1;
     /**
      * <code>optional uint64 proc_id = 1;</code>
      */
@@ -2340,36 +2433,28 @@ public final class MasterProtos {
   /**
    * Protobuf type {@code hbase.pb.DeleteColumnResponse}
    */
-  public static final class DeleteColumnResponse extends
-      com.google.protobuf.GeneratedMessage
-      implements DeleteColumnResponseOrBuilder {
+  public  static final class DeleteColumnResponse extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.DeleteColumnResponse)
+      DeleteColumnResponseOrBuilder {
     // Use DeleteColumnResponse.newBuilder() to construct.
-    private DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private DeleteColumnResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private DeleteColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final DeleteColumnResponse defaultInstance;
-    public static DeleteColumnResponse getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public DeleteColumnResponse getDefaultInstanceForType() {
-      return defaultInstance;
+    private DeleteColumnResponse() {
+      procId_ = 0L;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private DeleteColumnResponse(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -2399,7 +2484,7 @@ public final class MasterProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -2410,30 +2495,14 @@ public final class MasterProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.internal_static_hbase_pb_DeleteColumnResponse_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<DeleteColumnResponse> PARSER =
-        new com.google.protobuf.AbstractParser<DeleteColumnResponse>() {
-      public DeleteColumnResponse parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new DeleteColumnResponse(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<DeleteColumnResponse> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // optional uint64 proc_id = 1;
     public static final int PROC_ID_FIELD_NUMBER = 1;
     private long procId_;
     /**
@@ -2449,13 +2518,11 @@ public final class MasterProtos {
       return procId_;
     }
 
-    private void initFields() {
-      procId_ = 0L;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       memoizedIsInitialized = 1;
       return true;
@@ -2463,16 +2530,14 @@ public final class MasterProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         output.writeUInt64(1, procId_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
@@ -2480,19 +2545,13 @@ public final class MasterProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt64Size(1, procId_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -2508,12 +2567,10 @@ public final class MasterProtos {
         result = result && (getProcId()
             == other.getProcId());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -2523,9 +2580,10 @@ public final class MasterProtos {
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasProcId()) {
         hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getProcId());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getProcId());
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -2553,46 +2611,57 @@ public final class MasterProtos {
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
@@ -2600,14 +2669,15 @@

<TRUNCATED>
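
A minimal sketch (not part of the commit) of round-tripping the regenerated DeleteColumnRequest through the pb3 surface shown in this diff: GeneratedMessageV3 as the base class, newBuilder()/toBuilder() backed by DEFAULT_INSTANCE, and the static parser() accessor replacing the public PARSER field. The wrapper class name, the "ns"/"demo"/"cf1" values, and the nonce numbers are placeholders invented for illustration, and the TableName builder setters (setNamespace, setQualifier) are assumed from the hbase.pb.TableName message rather than taken from this patch.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;

public class DeleteColumnRequestRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a request; newBuilder() now delegates to DEFAULT_INSTANCE.toBuilder().
    // "ns", "demo", "cf1" and the nonce values are placeholders, not taken from the commit.
    DeleteColumnRequest request = DeleteColumnRequest.newBuilder()
        .setTableName(TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("ns"))
            .setQualifier(ByteString.copyFromUtf8("demo")))
        .setColumnName(ByteString.copyFromUtf8("cf1"))
        .setNonceGroup(1L)
        .setNonce(42L)
        .build();

    // Serialize, then parse back through the generated parser surface.
    byte[] bytes = request.toByteArray();
    DeleteColumnRequest parsed = DeleteColumnRequest.parseFrom(bytes);

    // Required fields (table_name, column_name) are still enforced by isInitialized().
    System.out.println(parsed.isInitialized());                 // true
    System.out.println(parsed.getColumnName().toStringUtf8());  // cf1
    System.out.println(parsed.getNonce());                      // 42
  }
}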