Posted to commits@hive.apache.org by we...@apache.org on 2017/05/25 20:50:05 UTC

[26/31] hive git commit: HIVE-16575: Support for 'UNIQUE' and 'NOT NULL' constraints (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

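The generated classes below model a table's unique constraints as a list of named constraints, each with an ordered list of columns plus ENABLE/VALIDATE/RELY flags. A minimal usage sketch of the new API follows; setColumnName, setKeySeq, and the set*Constraint mutators are visible in this diff, while setUkName, addCols, addUks, and the outer UniqueConstraints.parseFrom overload are assumed from standard protobuf 2.5 builder conventions (the excerpt is truncated before they appear):

import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints;
import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint;
import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn;

public class UniqueConstraintsSketch {
  public static void main(String[] args) throws Exception {
    // One column of the unique key: both fields are required.
    UniqueConstraintColumn col = UniqueConstraintColumn.newBuilder()
        .setColumnName("id")          // required string column_name = 1
        .setKeySeq(1)                 // required sint32 key_seq = 2 (position within the key)
        .build();

    // A named constraint with its columns and the optional flags
    // (enable_constraint = 3, validate_constraint = 4, rely_constraint = 5).
    UniqueConstraint uk = UniqueConstraint.newBuilder()
        .setUkName("uk1")             // required string uk_name = 1 (mutator assumed)
        .addCols(col)                 // repeated cols = 2 (mutator assumed)
        .setEnableConstraint(false)   // referenced in this diff's mergeFrom()
        .setValidateConstraint(false)
        .setRelyConstraint(true)
        .build();

    // Wrapper message: repeated UniqueConstraint uks = 1.
    UniqueConstraints uks = UniqueConstraints.newBuilder()
        .addUks(uk)                   // mutator assumed from conventions
        .build();

    // Round-trip through the wire format with the generated parser.
    byte[] bytes = uks.toByteArray();
    UniqueConstraints parsed = UniqueConstraints.parseFrom(bytes);
    System.out.println(parsed.getUks(0).getUkName());
  }
}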
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 03e492e..9cf1ee2 100644
--- a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -41301,6 +41301,4617 @@ public final class HbaseMetastoreProto {
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys)
   }
 
+  public interface UniqueConstraintsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint> 
+        getUksList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint getUks(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    int getUksCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraintOrBuilder> 
+        getUksOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraintOrBuilder getUksOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints}
+   */
+  public static final class UniqueConstraints extends
+      com.google.protobuf.GeneratedMessage
+      implements UniqueConstraintsOrBuilder {
+    // Use UniqueConstraints.newBuilder() to construct.
+    private UniqueConstraints(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private UniqueConstraints(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final UniqueConstraints defaultInstance;
+    public static UniqueConstraints getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public UniqueConstraints getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private UniqueConstraints(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                uks_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              uks_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          uks_ = java.util.Collections.unmodifiableList(uks_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<UniqueConstraints> PARSER =
+        new com.google.protobuf.AbstractParser<UniqueConstraints>() {
+      public UniqueConstraints parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new UniqueConstraints(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<UniqueConstraints> getParserForType() {
+      return PARSER;
+    }
+
+    public interface UniqueConstraintOrBuilder
+        extends com.google.protobuf.MessageOrBuilder {
+
+      // required string uk_name = 1;
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      boolean hasUkName();
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      java.lang.String getUkName();
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      com.google.protobuf.ByteString
+          getUkNameBytes();
+
+      // repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> 
+          getColsList();
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn getCols(int index);
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      int getColsCount();
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder> 
+          getColsOrBuilderList();
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder getColsOrBuilder(
+          int index);
+
+      // optional bool enable_constraint = 3;
+      /**
+       * <code>optional bool enable_constraint = 3;</code>
+       */
+      boolean hasEnableConstraint();
+      /**
+       * <code>optional bool enable_constraint = 3;</code>
+       */
+      boolean getEnableConstraint();
+
+      // optional bool validate_constraint = 4;
+      /**
+       * <code>optional bool validate_constraint = 4;</code>
+       */
+      boolean hasValidateConstraint();
+      /**
+       * <code>optional bool validate_constraint = 4;</code>
+       */
+      boolean getValidateConstraint();
+
+      // optional bool rely_constraint = 5;
+      /**
+       * <code>optional bool rely_constraint = 5;</code>
+       */
+      boolean hasRelyConstraint();
+      /**
+       * <code>optional bool rely_constraint = 5;</code>
+       */
+      boolean getRelyConstraint();
+    }
+    /**
+     * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint}
+     */
+    public static final class UniqueConstraint extends
+        com.google.protobuf.GeneratedMessage
+        implements UniqueConstraintOrBuilder {
+      // Use UniqueConstraint.newBuilder() to construct.
+      private UniqueConstraint(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+        super(builder);
+        this.unknownFields = builder.getUnknownFields();
+      }
+      private UniqueConstraint(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+      private static final UniqueConstraint defaultInstance;
+      public static UniqueConstraint getDefaultInstance() {
+        return defaultInstance;
+      }
+
+      public UniqueConstraint getDefaultInstanceForType() {
+        return defaultInstance;
+      }
+
+      private final com.google.protobuf.UnknownFieldSet unknownFields;
+      @java.lang.Override
+      public final com.google.protobuf.UnknownFieldSet
+          getUnknownFields() {
+        return this.unknownFields;
+      }
+      private UniqueConstraint(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        initFields();
+        int mutable_bitField0_ = 0;
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+        try {
+          boolean done = false;
+          while (!done) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                done = true;
+                break;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  done = true;
+                }
+                break;
+              }
+              case 10: {
+                bitField0_ |= 0x00000001;
+                ukName_ = input.readBytes();
+                break;
+              }
+              case 18: {
+                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+                  cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn>();
+                  mutable_bitField0_ |= 0x00000002;
+                }
+                cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.PARSER, extensionRegistry));
+                break;
+              }
+              case 24: {
+                bitField0_ |= 0x00000002;
+                enableConstraint_ = input.readBool();
+                break;
+              }
+              case 32: {
+                bitField0_ |= 0x00000004;
+                validateConstraint_ = input.readBool();
+                break;
+              }
+              case 40: {
+                bitField0_ |= 0x00000008;
+                relyConstraint_ = input.readBool();
+                break;
+              }
+            }
+          }
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(this);
+        } catch (java.io.IOException e) {
+          throw new com.google.protobuf.InvalidProtocolBufferException(
+              e.getMessage()).setUnfinishedMessage(this);
+        } finally {
+          if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+            cols_ = java.util.Collections.unmodifiableList(cols_);
+          }
+          this.unknownFields = unknownFields.build();
+          makeExtensionsImmutable();
+        }
+      }
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.Builder.class);
+      }
+
+      public static com.google.protobuf.Parser<UniqueConstraint> PARSER =
+          new com.google.protobuf.AbstractParser<UniqueConstraint>() {
+        public UniqueConstraint parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return new UniqueConstraint(input, extensionRegistry);
+        }
+      };
+
+      @java.lang.Override
+      public com.google.protobuf.Parser<UniqueConstraint> getParserForType() {
+        return PARSER;
+      }
+
+      public interface UniqueConstraintColumnOrBuilder
+          extends com.google.protobuf.MessageOrBuilder {
+
+        // required string column_name = 1;
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        boolean hasColumnName();
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        java.lang.String getColumnName();
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        com.google.protobuf.ByteString
+            getColumnNameBytes();
+
+        // required sint32 key_seq = 2;
+        /**
+         * <code>required sint32 key_seq = 2;</code>
+         */
+        boolean hasKeySeq();
+        /**
+         * <code>required sint32 key_seq = 2;</code>
+         */
+        int getKeySeq();
+      }
+      /**
+       * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn}
+       */
+      public static final class UniqueConstraintColumn extends
+          com.google.protobuf.GeneratedMessage
+          implements UniqueConstraintColumnOrBuilder {
+        // Use UniqueConstraintColumn.newBuilder() to construct.
+        private UniqueConstraintColumn(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+          super(builder);
+          this.unknownFields = builder.getUnknownFields();
+        }
+        private UniqueConstraintColumn(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+        private static final UniqueConstraintColumn defaultInstance;
+        public static UniqueConstraintColumn getDefaultInstance() {
+          return defaultInstance;
+        }
+
+        public UniqueConstraintColumn getDefaultInstanceForType() {
+          return defaultInstance;
+        }
+
+        private final com.google.protobuf.UnknownFieldSet unknownFields;
+        @java.lang.Override
+        public final com.google.protobuf.UnknownFieldSet
+            getUnknownFields() {
+          return this.unknownFields;
+        }
+        private UniqueConstraintColumn(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          initFields();
+          int mutable_bitField0_ = 0;
+          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+              com.google.protobuf.UnknownFieldSet.newBuilder();
+          try {
+            boolean done = false;
+            while (!done) {
+              int tag = input.readTag();
+              switch (tag) {
+                case 0:
+                  done = true;
+                  break;
+                default: {
+                  if (!parseUnknownField(input, unknownFields,
+                                         extensionRegistry, tag)) {
+                    done = true;
+                  }
+                  break;
+                }
+                case 10: {
+                  bitField0_ |= 0x00000001;
+                  columnName_ = input.readBytes();
+                  break;
+                }
+                case 16: {
+                  bitField0_ |= 0x00000002;
+                  keySeq_ = input.readSInt32();
+                  break;
+                }
+              }
+            }
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            throw e.setUnfinishedMessage(this);
+          } catch (java.io.IOException e) {
+            throw new com.google.protobuf.InvalidProtocolBufferException(
+                e.getMessage()).setUnfinishedMessage(this);
+          } finally {
+            this.unknownFields = unknownFields.build();
+            makeExtensionsImmutable();
+          }
+        }
+        public static final com.google.protobuf.Descriptors.Descriptor
+            getDescriptor() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_UniqueConstraintColumn_descriptor;
+        }
+
+        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+            internalGetFieldAccessorTable() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_UniqueConstraintColumn_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder.class);
+        }
+
+        public static com.google.protobuf.Parser<UniqueConstraintColumn> PARSER =
+            new com.google.protobuf.AbstractParser<UniqueConstraintColumn>() {
+          public UniqueConstraintColumn parsePartialFrom(
+              com.google.protobuf.CodedInputStream input,
+              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+              throws com.google.protobuf.InvalidProtocolBufferException {
+            return new UniqueConstraintColumn(input, extensionRegistry);
+          }
+        };
+
+        @java.lang.Override
+        public com.google.protobuf.Parser<UniqueConstraintColumn> getParserForType() {
+          return PARSER;
+        }
+
+        private int bitField0_;
+        // required string column_name = 1;
+        public static final int COLUMN_NAME_FIELD_NUMBER = 1;
+        private java.lang.Object columnName_;
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        public boolean hasColumnName() {
+          return ((bitField0_ & 0x00000001) == 0x00000001);
+        }
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        public java.lang.String getColumnName() {
+          java.lang.Object ref = columnName_;
+          if (ref instanceof java.lang.String) {
+            return (java.lang.String) ref;
+          } else {
+            com.google.protobuf.ByteString bs = 
+                (com.google.protobuf.ByteString) ref;
+            java.lang.String s = bs.toStringUtf8();
+            if (bs.isValidUtf8()) {
+              columnName_ = s;
+            }
+            return s;
+          }
+        }
+        /**
+         * <code>required string column_name = 1;</code>
+         */
+        public com.google.protobuf.ByteString
+            getColumnNameBytes() {
+          java.lang.Object ref = columnName_;
+          if (ref instanceof java.lang.String) {
+            com.google.protobuf.ByteString b = 
+                com.google.protobuf.ByteString.copyFromUtf8(
+                    (java.lang.String) ref);
+            columnName_ = b;
+            return b;
+          } else {
+            return (com.google.protobuf.ByteString) ref;
+          }
+        }
+
+        // required sint32 key_seq = 2;
+        public static final int KEY_SEQ_FIELD_NUMBER = 2;
+        private int keySeq_;
+        /**
+         * <code>required sint32 key_seq = 2;</code>
+         */
+        public boolean hasKeySeq() {
+          return ((bitField0_ & 0x00000002) == 0x00000002);
+        }
+        /**
+         * <code>required sint32 key_seq = 2;</code>
+         */
+        public int getKeySeq() {
+          return keySeq_;
+        }
+
+        private void initFields() {
+          columnName_ = "";
+          keySeq_ = 0;
+        }
+        private byte memoizedIsInitialized = -1;
+        public final boolean isInitialized() {
+          byte isInitialized = memoizedIsInitialized;
+          if (isInitialized != -1) return isInitialized == 1;
+
+          if (!hasColumnName()) {
+            memoizedIsInitialized = 0;
+            return false;
+          }
+          if (!hasKeySeq()) {
+            memoizedIsInitialized = 0;
+            return false;
+          }
+          memoizedIsInitialized = 1;
+          return true;
+        }
+
+        public void writeTo(com.google.protobuf.CodedOutputStream output)
+                            throws java.io.IOException {
+          getSerializedSize();
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            output.writeBytes(1, getColumnNameBytes());
+          }
+          if (((bitField0_ & 0x00000002) == 0x00000002)) {
+            output.writeSInt32(2, keySeq_);
+          }
+          getUnknownFields().writeTo(output);
+        }
+
+        private int memoizedSerializedSize = -1;
+        public int getSerializedSize() {
+          int size = memoizedSerializedSize;
+          if (size != -1) return size;
+
+          size = 0;
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            size += com.google.protobuf.CodedOutputStream
+              .computeBytesSize(1, getColumnNameBytes());
+          }
+          if (((bitField0_ & 0x00000002) == 0x00000002)) {
+            size += com.google.protobuf.CodedOutputStream
+              .computeSInt32Size(2, keySeq_);
+          }
+          size += getUnknownFields().getSerializedSize();
+          memoizedSerializedSize = size;
+          return size;
+        }
+
+        private static final long serialVersionUID = 0L;
+        @java.lang.Override
+        protected java.lang.Object writeReplace()
+            throws java.io.ObjectStreamException {
+          return super.writeReplace();
+        }
+
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            com.google.protobuf.ByteString data)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return PARSER.parseFrom(data);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            com.google.protobuf.ByteString data,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return PARSER.parseFrom(data, extensionRegistry);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(byte[] data)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return PARSER.parseFrom(data);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            byte[] data,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return PARSER.parseFrom(data, extensionRegistry);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(java.io.InputStream input)
+            throws java.io.IOException {
+          return PARSER.parseFrom(input);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            java.io.InputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          return PARSER.parseFrom(input, extensionRegistry);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseDelimitedFrom(java.io.InputStream input)
+            throws java.io.IOException {
+          return PARSER.parseDelimitedFrom(input);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseDelimitedFrom(
+            java.io.InputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          return PARSER.parseDelimitedFrom(input, extensionRegistry);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            com.google.protobuf.CodedInputStream input)
+            throws java.io.IOException {
+          return PARSER.parseFrom(input);
+        }
+        public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parseFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          return PARSER.parseFrom(input, extensionRegistry);
+        }
+
+        public static Builder newBuilder() { return Builder.create(); }
+        public Builder newBuilderForType() { return newBuilder(); }
+        public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn prototype) {
+          return newBuilder().mergeFrom(prototype);
+        }
+        public Builder toBuilder() { return newBuilder(this); }
+
+        @java.lang.Override
+        protected Builder newBuilderForType(
+            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          Builder builder = new Builder(parent);
+          return builder;
+        }
+        /**
+         * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn}
+         */
+        public static final class Builder extends
+            com.google.protobuf.GeneratedMessage.Builder<Builder>
+           implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder {
+          public static final com.google.protobuf.Descriptors.Descriptor
+              getDescriptor() {
+            return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_UniqueConstraintColumn_descriptor;
+          }
+
+          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+              internalGetFieldAccessorTable() {
+            return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_UniqueConstraintColumn_fieldAccessorTable
+                .ensureFieldAccessorsInitialized(
+                    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder.class);
+          }
+
+          // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.newBuilder()
+          private Builder() {
+            maybeForceBuilderInitialization();
+          }
+
+          private Builder(
+              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+            super(parent);
+            maybeForceBuilderInitialization();
+          }
+          private void maybeForceBuilderInitialization() {
+            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+            }
+          }
+          private static Builder create() {
+            return new Builder();
+          }
+
+          public Builder clear() {
+            super.clear();
+            columnName_ = "";
+            bitField0_ = (bitField0_ & ~0x00000001);
+            keySeq_ = 0;
+            bitField0_ = (bitField0_ & ~0x00000002);
+            return this;
+          }
+
+          public Builder clone() {
+            return create().mergeFrom(buildPartial());
+          }
+
+          public com.google.protobuf.Descriptors.Descriptor
+              getDescriptorForType() {
+            return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_UniqueConstraintColumn_descriptor;
+          }
+
+          public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn getDefaultInstanceForType() {
+            return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.getDefaultInstance();
+          }
+
+          public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn build() {
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn result = buildPartial();
+            if (!result.isInitialized()) {
+              throw newUninitializedMessageException(result);
+            }
+            return result;
+          }
+
+          public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn buildPartial() {
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn(this);
+            int from_bitField0_ = bitField0_;
+            int to_bitField0_ = 0;
+            if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+              to_bitField0_ |= 0x00000001;
+            }
+            result.columnName_ = columnName_;
+            if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+              to_bitField0_ |= 0x00000002;
+            }
+            result.keySeq_ = keySeq_;
+            result.bitField0_ = to_bitField0_;
+            onBuilt();
+            return result;
+          }
+
+          public Builder mergeFrom(com.google.protobuf.Message other) {
+            if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn) {
+              return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn)other);
+            } else {
+              super.mergeFrom(other);
+              return this;
+            }
+          }
+
+          public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn other) {
+            if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.getDefaultInstance()) return this;
+            if (other.hasColumnName()) {
+              bitField0_ |= 0x00000001;
+              columnName_ = other.columnName_;
+              onChanged();
+            }
+            if (other.hasKeySeq()) {
+              setKeySeq(other.getKeySeq());
+            }
+            this.mergeUnknownFields(other.getUnknownFields());
+            return this;
+          }
+
+          public final boolean isInitialized() {
+            if (!hasColumnName()) {
+              
+              return false;
+            }
+            if (!hasKeySeq()) {
+              
+              return false;
+            }
+            return true;
+          }
+
+          public Builder mergeFrom(
+              com.google.protobuf.CodedInputStream input,
+              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+              throws java.io.IOException {
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn parsedMessage = null;
+            try {
+              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+              parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn) e.getUnfinishedMessage();
+              throw e;
+            } finally {
+              if (parsedMessage != null) {
+                mergeFrom(parsedMessage);
+              }
+            }
+            return this;
+          }
+          private int bitField0_;
+
+          // required string column_name = 1;
+          private java.lang.Object columnName_ = "";
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public boolean hasColumnName() {
+            return ((bitField0_ & 0x00000001) == 0x00000001);
+          }
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public java.lang.String getColumnName() {
+            java.lang.Object ref = columnName_;
+            if (!(ref instanceof java.lang.String)) {
+              java.lang.String s = ((com.google.protobuf.ByteString) ref)
+                  .toStringUtf8();
+              columnName_ = s;
+              return s;
+            } else {
+              return (java.lang.String) ref;
+            }
+          }
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public com.google.protobuf.ByteString
+              getColumnNameBytes() {
+            java.lang.Object ref = columnName_;
+            if (ref instanceof String) {
+              com.google.protobuf.ByteString b = 
+                  com.google.protobuf.ByteString.copyFromUtf8(
+                      (java.lang.String) ref);
+              columnName_ = b;
+              return b;
+            } else {
+              return (com.google.protobuf.ByteString) ref;
+            }
+          }
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public Builder setColumnName(
+              java.lang.String value) {
+            if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+            columnName_ = value;
+            onChanged();
+            return this;
+          }
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public Builder clearColumnName() {
+            bitField0_ = (bitField0_ & ~0x00000001);
+            columnName_ = getDefaultInstance().getColumnName();
+            onChanged();
+            return this;
+          }
+          /**
+           * <code>required string column_name = 1;</code>
+           */
+          public Builder setColumnNameBytes(
+              com.google.protobuf.ByteString value) {
+            if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+            columnName_ = value;
+            onChanged();
+            return this;
+          }
+
+          // required sint32 key_seq = 2;
+          private int keySeq_ ;
+          /**
+           * <code>required sint32 key_seq = 2;</code>
+           */
+          public boolean hasKeySeq() {
+            return ((bitField0_ & 0x00000002) == 0x00000002);
+          }
+          /**
+           * <code>required sint32 key_seq = 2;</code>
+           */
+          public int getKeySeq() {
+            return keySeq_;
+          }
+          /**
+           * <code>required sint32 key_seq = 2;</code>
+           */
+          public Builder setKeySeq(int value) {
+            bitField0_ |= 0x00000002;
+            keySeq_ = value;
+            onChanged();
+            return this;
+          }
+          /**
+           * <code>required sint32 key_seq = 2;</code>
+           */
+          public Builder clearKeySeq() {
+            bitField0_ = (bitField0_ & ~0x00000002);
+            keySeq_ = 0;
+            onChanged();
+            return this;
+          }
+
+          // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn)
+        }
+
+        static {
+          defaultInstance = new UniqueConstraintColumn(true);
+          defaultInstance.initFields();
+        }
+
+        // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn)
+      }
+
+      private int bitField0_;
+      // required string uk_name = 1;
+      public static final int UK_NAME_FIELD_NUMBER = 1;
+      private java.lang.Object ukName_;
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      public boolean hasUkName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      public java.lang.String getUkName() {
+        java.lang.Object ref = ukName_;
+        if (ref instanceof java.lang.String) {
+          return (java.lang.String) ref;
+        } else {
+          com.google.protobuf.ByteString bs = 
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            ukName_ = s;
+          }
+          return s;
+        }
+      }
+      /**
+       * <code>required string uk_name = 1;</code>
+       */
+      public com.google.protobuf.ByteString
+          getUkNameBytes() {
+        java.lang.Object ref = ukName_;
+        if (ref instanceof java.lang.String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          ukName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+
+      // repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;
+      public static final int COLS_FIELD_NUMBER = 2;
+      private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> cols_;
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> getColsList() {
+        return cols_;
+      }
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder> 
+          getColsOrBuilderList() {
+        return cols_;
+      }
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      public int getColsCount() {
+        return cols_.size();
+      }
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn getCols(int index) {
+        return cols_.get(index);
+      }
+      /**
+       * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder getColsOrBuilder(
+          int index) {
+        return cols_.get(index);
+      }
+
+      // optional bool enable_constraint = 3;
+      public static final int ENABLE_CONSTRAINT_FIELD_NUMBER = 3;
+      private boolean enableConstraint_;
+      /**
+       * <code>optional bool enable_constraint = 3;</code>
+       */
+      public boolean hasEnableConstraint() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional bool enable_constraint = 3;</code>
+       */
+      public boolean getEnableConstraint() {
+        return enableConstraint_;
+      }
+
+      // optional bool validate_constraint = 4;
+      public static final int VALIDATE_CONSTRAINT_FIELD_NUMBER = 4;
+      private boolean validateConstraint_;
+      /**
+       * <code>optional bool validate_constraint = 4;</code>
+       */
+      public boolean hasValidateConstraint() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional bool validate_constraint = 4;</code>
+       */
+      public boolean getValidateConstraint() {
+        return validateConstraint_;
+      }
+
+      // optional bool rely_constraint = 5;
+      public static final int RELY_CONSTRAINT_FIELD_NUMBER = 5;
+      private boolean relyConstraint_;
+      /**
+       * <code>optional bool rely_constraint = 5;</code>
+       */
+      public boolean hasRelyConstraint() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional bool rely_constraint = 5;</code>
+       */
+      public boolean getRelyConstraint() {
+        return relyConstraint_;
+      }
+
+      private void initFields() {
+        ukName_ = "";
+        cols_ = java.util.Collections.emptyList();
+        enableConstraint_ = false;
+        validateConstraint_ = false;
+        relyConstraint_ = false;
+      }
+      private byte memoizedIsInitialized = -1;
+      public final boolean isInitialized() {
+        byte isInitialized = memoizedIsInitialized;
+        if (isInitialized != -1) return isInitialized == 1;
+
+        if (!hasUkName()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+        for (int i = 0; i < getColsCount(); i++) {
+          if (!getCols(i).isInitialized()) {
+            memoizedIsInitialized = 0;
+            return false;
+          }
+        }
+        memoizedIsInitialized = 1;
+        return true;
+      }
+
+      public void writeTo(com.google.protobuf.CodedOutputStream output)
+                          throws java.io.IOException {
+        getSerializedSize();
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          output.writeBytes(1, getUkNameBytes());
+        }
+        for (int i = 0; i < cols_.size(); i++) {
+          output.writeMessage(2, cols_.get(i));
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          output.writeBool(3, enableConstraint_);
+        }
+        if (((bitField0_ & 0x00000004) == 0x00000004)) {
+          output.writeBool(4, validateConstraint_);
+        }
+        if (((bitField0_ & 0x00000008) == 0x00000008)) {
+          output.writeBool(5, relyConstraint_);
+        }
+        getUnknownFields().writeTo(output);
+      }
+
+      private int memoizedSerializedSize = -1;
+      public int getSerializedSize() {
+        int size = memoizedSerializedSize;
+        if (size != -1) return size;
+
+        size = 0;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(1, getUkNameBytes());
+        }
+        for (int i = 0; i < cols_.size(); i++) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeMessageSize(2, cols_.get(i));
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBoolSize(3, enableConstraint_);
+        }
+        if (((bitField0_ & 0x00000004) == 0x00000004)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBoolSize(4, validateConstraint_);
+        }
+        if (((bitField0_ & 0x00000008) == 0x00000008)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBoolSize(5, relyConstraint_);
+        }
+        size += getUnknownFields().getSerializedSize();
+        memoizedSerializedSize = size;
+        return size;
+      }
+
+      private static final long serialVersionUID = 0L;
+      @java.lang.Override
+      protected java.lang.Object writeReplace()
+          throws java.io.ObjectStreamException {
+        return super.writeReplace();
+      }
+
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+
+      public static Builder newBuilder() { return Builder.create(); }
+      public Builder newBuilderForType() { return newBuilder(); }
+      public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint prototype) {
+        return newBuilder().mergeFrom(prototype);
+      }
+      public Builder toBuilder() { return newBuilder(this); }
+
+      @java.lang.Override
+      protected Builder newBuilderForType(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        Builder builder = new Builder(parent);
+        return builder;
+      }
+      /**
+       * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint}
+       */
+      public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder>
+         implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraintOrBuilder {
+        public static final com.google.protobuf.Descriptors.Descriptor
+            getDescriptor() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_descriptor;
+        }
+
+        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+            internalGetFieldAccessorTable() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.Builder.class);
+        }
+
+        // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.newBuilder()
+        private Builder() {
+          maybeForceBuilderInitialization();
+        }
+
+        private Builder(
+            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          super(parent);
+          maybeForceBuilderInitialization();
+        }
+        private void maybeForceBuilderInitialization() {
+          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+            getColsFieldBuilder();
+          }
+        }
+        private static Builder create() {
+          return new Builder();
+        }
+
+        public Builder clear() {
+          super.clear();
+          ukName_ = "";
+          bitField0_ = (bitField0_ & ~0x00000001);
+          if (colsBuilder_ == null) {
+            cols_ = java.util.Collections.emptyList();
+            bitField0_ = (bitField0_ & ~0x00000002);
+          } else {
+            colsBuilder_.clear();
+          }
+          enableConstraint_ = false;
+          bitField0_ = (bitField0_ & ~0x00000004);
+          validateConstraint_ = false;
+          bitField0_ = (bitField0_ & ~0x00000008);
+          relyConstraint_ = false;
+          bitField0_ = (bitField0_ & ~0x00000010);
+          return this;
+        }
+
+        public Builder clone() {
+          return create().mergeFrom(buildPartial());
+        }
+
+        public com.google.protobuf.Descriptors.Descriptor
+            getDescriptorForType() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_UniqueConstraint_descriptor;
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint getDefaultInstanceForType() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.getDefaultInstance();
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint build() {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(result);
+          }
+          return result;
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint buildPartial() {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint(this);
+          int from_bitField0_ = bitField0_;
+          int to_bitField0_ = 0;
+          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+            to_bitField0_ |= 0x00000001;
+          }
+          result.ukName_ = ukName_;
+          if (colsBuilder_ == null) {
+            if (((bitField0_ & 0x00000002) == 0x00000002)) {
+              cols_ = java.util.Collections.unmodifiableList(cols_);
+              bitField0_ = (bitField0_ & ~0x00000002);
+            }
+            result.cols_ = cols_;
+          } else {
+            result.cols_ = colsBuilder_.build();
+          }
+          if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+            to_bitField0_ |= 0x00000002;
+          }
+          result.enableConstraint_ = enableConstraint_;
+          if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+            to_bitField0_ |= 0x00000004;
+          }
+          result.validateConstraint_ = validateConstraint_;
+          if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+            to_bitField0_ |= 0x00000008;
+          }
+          result.relyConstraint_ = relyConstraint_;
+          result.bitField0_ = to_bitField0_;
+          onBuilt();
+          return result;
+        }
+
+        public Builder mergeFrom(com.google.protobuf.Message other) {
+          if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint) {
+            return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint)other);
+          } else {
+            super.mergeFrom(other);
+            return this;
+          }
+        }
+
+        public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint other) {
+          if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.getDefaultInstance()) return this;
+          if (other.hasUkName()) {
+            bitField0_ |= 0x00000001;
+            ukName_ = other.ukName_;
+            onChanged();
+          }
+          if (colsBuilder_ == null) {
+            if (!other.cols_.isEmpty()) {
+              if (cols_.isEmpty()) {
+                cols_ = other.cols_;
+                bitField0_ = (bitField0_ & ~0x00000002);
+              } else {
+                ensureColsIsMutable();
+                cols_.addAll(other.cols_);
+              }
+              onChanged();
+            }
+          } else {
+            if (!other.cols_.isEmpty()) {
+              if (colsBuilder_.isEmpty()) {
+                colsBuilder_.dispose();
+                colsBuilder_ = null;
+                cols_ = other.cols_;
+                bitField0_ = (bitField0_ & ~0x00000002);
+                colsBuilder_ = 
+                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                     getColsFieldBuilder() : null;
+              } else {
+                colsBuilder_.addAllMessages(other.cols_);
+              }
+            }
+          }
+          if (other.hasEnableConstraint()) {
+            setEnableConstraint(other.getEnableConstraint());
+          }
+          if (other.hasValidateConstraint()) {
+            setValidateConstraint(other.getValidateConstraint());
+          }
+          if (other.hasRelyConstraint()) {
+            setRelyConstraint(other.getRelyConstraint());
+          }
+          this.mergeUnknownFields(other.getUnknownFields());
+          return this;
+        }
+
+        public final boolean isInitialized() {
+          if (!hasUkName()) {
+            return false;
+          }
+          for (int i = 0; i < getColsCount(); i++) {
+            if (!getCols(i).isInitialized()) {
+              return false;
+            }
+          }
+          return true;
+        }
+
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint parsedMessage = null;
+          try {
+            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint) e.getUnfinishedMessage();
+            throw e;
+          } finally {
+            if (parsedMessage != null) {
+              mergeFrom(parsedMessage);
+            }
+          }
+          return this;
+        }
+        private int bitField0_;
+
+        // required string uk_name = 1;
+        private java.lang.Object ukName_ = "";
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public boolean hasUkName() {
+          return ((bitField0_ & 0x00000001) == 0x00000001);
+        }
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public java.lang.String getUkName() {
+          java.lang.Object ref = ukName_;
+          if (!(ref instanceof java.lang.String)) {
+            java.lang.String s = ((com.google.protobuf.ByteString) ref)
+                .toStringUtf8();
+            ukName_ = s;
+            return s;
+          } else {
+            return (java.lang.String) ref;
+          }
+        }
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public com.google.protobuf.ByteString
+            getUkNameBytes() {
+          java.lang.Object ref = ukName_;
+          if (ref instanceof String) {
+            com.google.protobuf.ByteString b = 
+                com.google.protobuf.ByteString.copyFromUtf8(
+                    (java.lang.String) ref);
+            ukName_ = b;
+            return b;
+          } else {
+            return (com.google.protobuf.ByteString) ref;
+          }
+        }
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public Builder setUkName(
+            java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000001;
+          ukName_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public Builder clearUkName() {
+          bitField0_ = (bitField0_ & ~0x00000001);
+          ukName_ = getDefaultInstance().getUkName();
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>required string uk_name = 1;</code>
+         */
+        public Builder setUkNameBytes(
+            com.google.protobuf.ByteString value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000001;
+          ukName_ = value;
+          onChanged();
+          return this;
+        }
+
+        // repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;
+        private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> cols_ =
+          java.util.Collections.emptyList();
+        private void ensureColsIsMutable() {
+          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+            cols_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn>(cols_);
+            bitField0_ |= 0x00000002;
+          }
+        }
+
+        private com.google.protobuf.RepeatedFieldBuilder<
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder> colsBuilder_;
+
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> getColsList() {
+          if (colsBuilder_ == null) {
+            return java.util.Collections.unmodifiableList(cols_);
+          } else {
+            return colsBuilder_.getMessageList();
+          }
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public int getColsCount() {
+          if (colsBuilder_ == null) {
+            return cols_.size();
+          } else {
+            return colsBuilder_.getCount();
+          }
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn getCols(int index) {
+          if (colsBuilder_ == null) {
+            return cols_.get(index);
+          } else {
+            return colsBuilder_.getMessage(index);
+          }
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder setCols(
+            int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn value) {
+          if (colsBuilder_ == null) {
+            if (value == null) {
+              throw new NullPointerException();
+            }
+            ensureColsIsMutable();
+            cols_.set(index, value);
+            onChanged();
+          } else {
+            colsBuilder_.setMessage(index, value);
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder setCols(
+            int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder builderForValue) {
+          if (colsBuilder_ == null) {
+            ensureColsIsMutable();
+            cols_.set(index, builderForValue.build());
+            onChanged();
+          } else {
+            colsBuilder_.setMessage(index, builderForValue.build());
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn value) {
+          if (colsBuilder_ == null) {
+            if (value == null) {
+              throw new NullPointerException();
+            }
+            ensureColsIsMutable();
+            cols_.add(value);
+            onChanged();
+          } else {
+            colsBuilder_.addMessage(value);
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder addCols(
+            int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn value) {
+          if (colsBuilder_ == null) {
+            if (value == null) {
+              throw new NullPointerException();
+            }
+            ensureColsIsMutable();
+            cols_.add(index, value);
+            onChanged();
+          } else {
+            colsBuilder_.addMessage(index, value);
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder addCols(
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder builderForValue) {
+          if (colsBuilder_ == null) {
+            ensureColsIsMutable();
+            cols_.add(builderForValue.build());
+            onChanged();
+          } else {
+            colsBuilder_.addMessage(builderForValue.build());
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder addCols(
+            int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder builderForValue) {
+          if (colsBuilder_ == null) {
+            ensureColsIsMutable();
+            cols_.add(index, builderForValue.build());
+            onChanged();
+          } else {
+            colsBuilder_.addMessage(index, builderForValue.build());
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder addAllCols(
+            java.lang.Iterable<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn> values) {
+          if (colsBuilder_ == null) {
+            ensureColsIsMutable();
+            super.addAll(values, cols_);
+            onChanged();
+          } else {
+            colsBuilder_.addAllMessages(values);
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder clearCols() {
+          if (colsBuilder_ == null) {
+            cols_ = java.util.Collections.emptyList();
+            bitField0_ = (bitField0_ & ~0x00000002);
+            onChanged();
+          } else {
+            colsBuilder_.clear();
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public Builder removeCols(int index) {
+          if (colsBuilder_ == null) {
+            ensureColsIsMutable();
+            cols_.remove(index);
+            onChanged();
+          } else {
+            colsBuilder_.remove(index);
+          }
+          return this;
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder getColsBuilder(
+            int index) {
+          return getColsFieldBuilder().getBuilder(index);
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder getColsOrBuilder(
+            int index) {
+          if (colsBuilder_ == null) {
+            return cols_.get(index);
+          } else {
+            return colsBuilder_.getMessageOrBuilder(index);
+          }
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder> 
+             getColsOrBuilderList() {
+          if (colsBuilder_ != null) {
+            return colsBuilder_.getMessageOrBuilderList();
+          } else {
+            return java.util.Collections.unmodifiableList(cols_);
+          }
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder addColsBuilder() {
+          return getColsFieldBuilder().addBuilder(
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.getDefaultInstance());
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder addColsBuilder(
+            int index) {
+          return getColsFieldBuilder().addBuilder(
+              index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.getDefaultInstance());
+        }
+        /**
+         * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn cols = 2;</code>
+         */
+        public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder> 
+             getColsBuilderList() {
+          return getColsFieldBuilder().getBuilderList();
+        }
+        private com.google.protobuf.RepeatedFieldBuilder<
+            org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder> 
+            getColsFieldBuilder() {
+          if (colsBuilder_ == null) {
+            colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumnOrBuilder>(
+                    cols_,
+                    ((bitField0_ & 0x00000002) == 0x00000002),
+                    getParentForChildren(),
+                    isClean());
+            cols_ = null;
+          }
+          return colsBuilder_;
+        }
+
+        // optional bool enable_constraint = 3;
+        private boolean enableConstraint_ ;
+        /**
+         * <code>optional bool enable_constraint = 3;</code>
+         */
+        public boolean hasEnableConstraint() {
+          return ((bitField0_ & 0x00000004) == 0x00000004);
+        }
+        /**
+         * <code>optional bool enable_constraint = 3;</code>
+         */
+        public boolean getEnableConstraint() {
+          return enableConstraint_;
+        }
+        /**
+         * <code>optional bool enable_constraint = 3;</code>
+         */
+        public Builder setEnableConstraint(boolean value) {
+          bitField0_ |= 0x00000004;
+          enableConstraint_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>optional bool enable_constraint = 3;</code>
+         */
+        public Builder clearEnableConstraint() {
+          bitField0_ = (bitField0_ & ~0x00000004);
+          enableConstraint_ = false;
+          onChanged();
+          return this;
+        }
+
+        // optional bool validate_constraint = 4;
+        private boolean validateConstraint_ ;
+        /**
+         * <code>optional bool validate_constraint = 4;</code>
+         */
+        public boolean hasValidateConstraint() {
+          return ((bitField0_ & 0x00000008) == 0x00000008);
+        }
+        /**
+         * <code>optional bool validate_constraint = 4;</code>
+         */
+        public boolean getValidateConstraint() {
+          return validateConstraint_;
+        }
+        /**
+         * <code>optional bool validate_constraint = 4;</code>
+         */
+        public Builder setValidateConstraint(boolean value) {
+          bitField0_ |= 0x00000008;
+          validateConstraint_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>optional bool validate_constraint = 4;</code>
+         */
+        public Builder clearValidateConstraint() {
+          bitField0_ = (bitField0_ & ~0x00000008);
+          validateConstraint_ = false;
+          onChanged();
+          return this;
+        }
+
+        // optional bool rely_constraint = 5;
+        private boolean relyConstraint_ ;
+        /**
+         * <code>optional bool rely_constraint = 5;</code>
+         */
+        public boolean hasRelyConstraint() {
+          return ((bitField0_ & 0x00000010) == 0x00000010);
+        }
+        /**
+         * <code>optional bool rely_constraint = 5;</code>
+         */
+        public boolean getRelyConstraint() {
+          return relyConstraint_;
+        }
+        /**
+         * <code>optional bool rely_constraint = 5;</code>
+         */
+        public Builder setRelyConstraint(boolean value) {
+          bitField0_ |= 0x00000010;
+          relyConstraint_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>optional bool rely_constraint = 5;</code>
+         */
+        public Builder clearRelyConstraint() {
+          bitField0_ = (bitField0_ & ~0x00000010);
+          relyConstraint_ = false;
+          onChanged();
+          return this;
+        }
+
+        // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint)
+      }
+
+      static {
+        defaultInstance = new UniqueConstraint(true);
+        defaultInstance.initFields();
+      }
+
+      // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint)
+    }
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;
+    public static final int UKS_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint> uks_;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint> getUksList() {
+      return uks_;
+    }
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraintOrBuilder> 
+        getUksOrBuilderList() {
+      return uks_;
+    }
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    public int getUksCount() {
+      return uks_.size();
+    }
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint getUks(int index) {
+      return uks_.get(index);
+    }
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.UniqueConstraints.UniqueConstraint uks = 1;</code>
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraintOrBuilder getUksOrBuilder(
+        int index) {
+      return uks_.get(index);
+    }
+
+    private void initFields() {
+      uks_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      for (int i = 0; i < getUksCount(); i++) {
+        if (!getUks(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < uks_.size(); i++) {
+        output.writeMessage(1, uks_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < uks_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, uks_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.UniqueConstraints}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraintsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getUksFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (uksBuilder_ == null) {
+          uks_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          uksBuilder_.clear();
+        }
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_UniqueConstraints_descriptor;
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints build() {
+        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints buildPartial() {
+        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints(this);
+        int from_bitField0_ = bitField0_;
+        if (uksBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            uks_ = java.util.Collections.unmodifiableList(uks_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.uks_ = uks_;
+        } else {
+          result.uks_ = uksBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.prot

<TRUNCATED>
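----------------------------------------------------------------------

For readers skimming this generated hunk: the short sketch below is
illustrative only and is not part of the commit. It shows how the new
UniqueConstraints message can be built and round-tripped, assuming only
the standard protobuf 2.5 generated surface — addUks() on
UniqueConstraints.Builder is generated for the repeated 'uks' field
(mirroring the addCols() methods visible above), and
toByteArray()/parseFrom() come from the generated message base classes.

import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints;
import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.UniqueConstraints.UniqueConstraint;

public class UniqueConstraintsExample {
  public static void main(String[] args) throws Exception {
    // uk_name is the only required field; build() throws an
    // UninitializedMessageException if it is unset.
    UniqueConstraint uk = UniqueConstraint.newBuilder()
        .setUkName("uk_customer_id")
        .setEnableConstraint(false)    // ENABLE/VALIDATE/RELY flags from HIVE-16575
        .setValidateConstraint(false)
        .setRelyConstraint(true)       // cols omitted: UniqueConstraintColumn's
        .build();                      // setters fall outside this hunk

    UniqueConstraints uks = UniqueConstraints.newBuilder()
        .addUks(uk)
        .build();

    // Round-trip through the wire format, as the HBase-backed metastore would.
    byte[] bytes = uks.toByteArray();
    UniqueConstraints parsed = UniqueConstraints.parseFrom(bytes);
    System.out.println(parsed.getUks(0).getUkName());
  }
}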