Posted to commits@hive.apache.org by jc...@apache.org on 2017/05/25 10:29:11 UTC

[16/21] hive git commit: HIVE-16575: Support for 'UNIQUE' and 'NOT NULL' constraints (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
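
For context, this patch adds Thrift structs for UNIQUE and NOT NULL constraints to the metastore API (SQLUniqueConstraint, SQLNotNullConstraint, and the matching request/response types). The sketch below is only an illustration of how the generated Java beans added by this commit might be populated; the table, column, and constraint names are placeholders, and the setter names on the constraint structs are assumed from the usual Thrift bean conventions (the Java classes for them are not part of this excerpt).

    import java.util.Arrays;
    import org.apache.hadoop.hive.metastore.api.AddNotNullConstraintRequest;
    import org.apache.hadoop.hive.metastore.api.AddUniqueConstraintRequest;
    import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
    import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

    public class ConstraintRequestSketch {
      public static void main(String[] args) {
        // UNIQUE constraint on column 'id' of default.t1 (key_seq is the 1-based position
        // of the column inside the unique key).
        SQLUniqueConstraint uk = new SQLUniqueConstraint();
        uk.setTable_db("default");
        uk.setTable_name("t1");
        uk.setColumn_name("id");
        uk.setKey_seq(1);
        uk.setUk_name("uk_t1_id");
        uk.setEnable_cstr(false);   // DISABLE
        uk.setValidate_cstr(false); // NOVALIDATE
        uk.setRely_cstr(true);      // RELY

        // NOT NULL constraint on column 'name' of the same table.
        SQLNotNullConstraint nn = new SQLNotNullConstraint();
        nn.setTable_db("default");
        nn.setTable_name("t1");
        nn.setColumn_name("name");
        nn.setNn_name("nn_t1_name");
        nn.setEnable_cstr(false);
        nn.setValidate_cstr(false);
        nn.setRely_cstr(true);

        // The add-constraint requests introduced by this commit simply wrap lists of these structs.
        AddUniqueConstraintRequest addUk = new AddUniqueConstraintRequest(Arrays.asList(uk));
        AddNotNullConstraintRequest addNn = new AddNotNullConstraintRequest(Arrays.asList(nn));
        System.out.println(addUk);
        System.out.println(addNn);
      }
    }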

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index c21ded1..20aeb96 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -174,6 +174,10 @@ class SQLPrimaryKey;
 
 class SQLForeignKey;
 
+class SQLUniqueConstraint;
+
+class SQLNotNullConstraint;
+
 class Type;
 
 class HiveObjectRef;
@@ -272,12 +276,24 @@ class ForeignKeysRequest;
 
 class ForeignKeysResponse;
 
+class UniqueConstraintsRequest;
+
+class UniqueConstraintsResponse;
+
+class NotNullConstraintsRequest;
+
+class NotNullConstraintsResponse;
+
 class DropConstraintRequest;
 
 class AddPrimaryKeyRequest;
 
 class AddForeignKeyRequest;
 
+class AddUniqueConstraintRequest;
+
+class AddNotNullConstraintRequest;
+
 class PartitionsByExprResult;
 
 class PartitionsByExprRequest;
@@ -762,6 +778,176 @@ inline std::ostream& operator<<(std::ostream& out, const SQLForeignKey& obj)
   return out;
 }
 
+typedef struct _SQLUniqueConstraint__isset {
+  _SQLUniqueConstraint__isset() : table_db(false), table_name(false), column_name(false), key_seq(false), uk_name(false), enable_cstr(false), validate_cstr(false), rely_cstr(false) {}
+  bool table_db :1;
+  bool table_name :1;
+  bool column_name :1;
+  bool key_seq :1;
+  bool uk_name :1;
+  bool enable_cstr :1;
+  bool validate_cstr :1;
+  bool rely_cstr :1;
+} _SQLUniqueConstraint__isset;
+
+class SQLUniqueConstraint {
+ public:
+
+  SQLUniqueConstraint(const SQLUniqueConstraint&);
+  SQLUniqueConstraint& operator=(const SQLUniqueConstraint&);
+  SQLUniqueConstraint() : table_db(), table_name(), column_name(), key_seq(0), uk_name(), enable_cstr(0), validate_cstr(0), rely_cstr(0) {
+  }
+
+  virtual ~SQLUniqueConstraint() throw();
+  std::string table_db;
+  std::string table_name;
+  std::string column_name;
+  int32_t key_seq;
+  std::string uk_name;
+  bool enable_cstr;
+  bool validate_cstr;
+  bool rely_cstr;
+
+  _SQLUniqueConstraint__isset __isset;
+
+  void __set_table_db(const std::string& val);
+
+  void __set_table_name(const std::string& val);
+
+  void __set_column_name(const std::string& val);
+
+  void __set_key_seq(const int32_t val);
+
+  void __set_uk_name(const std::string& val);
+
+  void __set_enable_cstr(const bool val);
+
+  void __set_validate_cstr(const bool val);
+
+  void __set_rely_cstr(const bool val);
+
+  bool operator == (const SQLUniqueConstraint & rhs) const
+  {
+    if (!(table_db == rhs.table_db))
+      return false;
+    if (!(table_name == rhs.table_name))
+      return false;
+    if (!(column_name == rhs.column_name))
+      return false;
+    if (!(key_seq == rhs.key_seq))
+      return false;
+    if (!(uk_name == rhs.uk_name))
+      return false;
+    if (!(enable_cstr == rhs.enable_cstr))
+      return false;
+    if (!(validate_cstr == rhs.validate_cstr))
+      return false;
+    if (!(rely_cstr == rhs.rely_cstr))
+      return false;
+    return true;
+  }
+  bool operator != (const SQLUniqueConstraint &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const SQLUniqueConstraint & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(SQLUniqueConstraint &a, SQLUniqueConstraint &b);
+
+inline std::ostream& operator<<(std::ostream& out, const SQLUniqueConstraint& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+typedef struct _SQLNotNullConstraint__isset {
+  _SQLNotNullConstraint__isset() : table_db(false), table_name(false), column_name(false), nn_name(false), enable_cstr(false), validate_cstr(false), rely_cstr(false) {}
+  bool table_db :1;
+  bool table_name :1;
+  bool column_name :1;
+  bool nn_name :1;
+  bool enable_cstr :1;
+  bool validate_cstr :1;
+  bool rely_cstr :1;
+} _SQLNotNullConstraint__isset;
+
+class SQLNotNullConstraint {
+ public:
+
+  SQLNotNullConstraint(const SQLNotNullConstraint&);
+  SQLNotNullConstraint& operator=(const SQLNotNullConstraint&);
+  SQLNotNullConstraint() : table_db(), table_name(), column_name(), nn_name(), enable_cstr(0), validate_cstr(0), rely_cstr(0) {
+  }
+
+  virtual ~SQLNotNullConstraint() throw();
+  std::string table_db;
+  std::string table_name;
+  std::string column_name;
+  std::string nn_name;
+  bool enable_cstr;
+  bool validate_cstr;
+  bool rely_cstr;
+
+  _SQLNotNullConstraint__isset __isset;
+
+  void __set_table_db(const std::string& val);
+
+  void __set_table_name(const std::string& val);
+
+  void __set_column_name(const std::string& val);
+
+  void __set_nn_name(const std::string& val);
+
+  void __set_enable_cstr(const bool val);
+
+  void __set_validate_cstr(const bool val);
+
+  void __set_rely_cstr(const bool val);
+
+  bool operator == (const SQLNotNullConstraint & rhs) const
+  {
+    if (!(table_db == rhs.table_db))
+      return false;
+    if (!(table_name == rhs.table_name))
+      return false;
+    if (!(column_name == rhs.column_name))
+      return false;
+    if (!(nn_name == rhs.nn_name))
+      return false;
+    if (!(enable_cstr == rhs.enable_cstr))
+      return false;
+    if (!(validate_cstr == rhs.validate_cstr))
+      return false;
+    if (!(rely_cstr == rhs.rely_cstr))
+      return false;
+    return true;
+  }
+  bool operator != (const SQLNotNullConstraint &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const SQLNotNullConstraint & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(SQLNotNullConstraint &a, SQLNotNullConstraint &b);
+
+inline std::ostream& operator<<(std::ostream& out, const SQLNotNullConstraint& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
 typedef struct _Type__isset {
   _Type__isset() : name(false), type1(false), type2(false), fields(false) {}
   bool name :1;
@@ -3846,6 +4032,176 @@ inline std::ostream& operator<<(std::ostream& out, const ForeignKeysResponse& ob
 }
 
 
+class UniqueConstraintsRequest {
+ public:
+
+  UniqueConstraintsRequest(const UniqueConstraintsRequest&);
+  UniqueConstraintsRequest& operator=(const UniqueConstraintsRequest&);
+  UniqueConstraintsRequest() : db_name(), tbl_name() {
+  }
+
+  virtual ~UniqueConstraintsRequest() throw();
+  std::string db_name;
+  std::string tbl_name;
+
+  void __set_db_name(const std::string& val);
+
+  void __set_tbl_name(const std::string& val);
+
+  bool operator == (const UniqueConstraintsRequest & rhs) const
+  {
+    if (!(db_name == rhs.db_name))
+      return false;
+    if (!(tbl_name == rhs.tbl_name))
+      return false;
+    return true;
+  }
+  bool operator != (const UniqueConstraintsRequest &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const UniqueConstraintsRequest & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(UniqueConstraintsRequest &a, UniqueConstraintsRequest &b);
+
+inline std::ostream& operator<<(std::ostream& out, const UniqueConstraintsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
+class UniqueConstraintsResponse {
+ public:
+
+  UniqueConstraintsResponse(const UniqueConstraintsResponse&);
+  UniqueConstraintsResponse& operator=(const UniqueConstraintsResponse&);
+  UniqueConstraintsResponse() {
+  }
+
+  virtual ~UniqueConstraintsResponse() throw();
+  std::vector<SQLUniqueConstraint>  uniqueConstraints;
+
+  void __set_uniqueConstraints(const std::vector<SQLUniqueConstraint> & val);
+
+  bool operator == (const UniqueConstraintsResponse & rhs) const
+  {
+    if (!(uniqueConstraints == rhs.uniqueConstraints))
+      return false;
+    return true;
+  }
+  bool operator != (const UniqueConstraintsResponse &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const UniqueConstraintsResponse & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(UniqueConstraintsResponse &a, UniqueConstraintsResponse &b);
+
+inline std::ostream& operator<<(std::ostream& out, const UniqueConstraintsResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
+class NotNullConstraintsRequest {
+ public:
+
+  NotNullConstraintsRequest(const NotNullConstraintsRequest&);
+  NotNullConstraintsRequest& operator=(const NotNullConstraintsRequest&);
+  NotNullConstraintsRequest() : db_name(), tbl_name() {
+  }
+
+  virtual ~NotNullConstraintsRequest() throw();
+  std::string db_name;
+  std::string tbl_name;
+
+  void __set_db_name(const std::string& val);
+
+  void __set_tbl_name(const std::string& val);
+
+  bool operator == (const NotNullConstraintsRequest & rhs) const
+  {
+    if (!(db_name == rhs.db_name))
+      return false;
+    if (!(tbl_name == rhs.tbl_name))
+      return false;
+    return true;
+  }
+  bool operator != (const NotNullConstraintsRequest &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const NotNullConstraintsRequest & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(NotNullConstraintsRequest &a, NotNullConstraintsRequest &b);
+
+inline std::ostream& operator<<(std::ostream& out, const NotNullConstraintsRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
+class NotNullConstraintsResponse {
+ public:
+
+  NotNullConstraintsResponse(const NotNullConstraintsResponse&);
+  NotNullConstraintsResponse& operator=(const NotNullConstraintsResponse&);
+  NotNullConstraintsResponse() {
+  }
+
+  virtual ~NotNullConstraintsResponse() throw();
+  std::vector<SQLNotNullConstraint>  notNullConstraints;
+
+  void __set_notNullConstraints(const std::vector<SQLNotNullConstraint> & val);
+
+  bool operator == (const NotNullConstraintsResponse & rhs) const
+  {
+    if (!(notNullConstraints == rhs.notNullConstraints))
+      return false;
+    return true;
+  }
+  bool operator != (const NotNullConstraintsResponse &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const NotNullConstraintsResponse & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(NotNullConstraintsResponse &a, NotNullConstraintsResponse &b);
+
+inline std::ostream& operator<<(std::ostream& out, const NotNullConstraintsResponse& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
 class DropConstraintRequest {
  public:
 
@@ -3976,6 +4332,86 @@ inline std::ostream& operator<<(std::ostream& out, const AddForeignKeyRequest& o
 }
 
 
+class AddUniqueConstraintRequest {
+ public:
+
+  AddUniqueConstraintRequest(const AddUniqueConstraintRequest&);
+  AddUniqueConstraintRequest& operator=(const AddUniqueConstraintRequest&);
+  AddUniqueConstraintRequest() {
+  }
+
+  virtual ~AddUniqueConstraintRequest() throw();
+  std::vector<SQLUniqueConstraint>  uniqueConstraintCols;
+
+  void __set_uniqueConstraintCols(const std::vector<SQLUniqueConstraint> & val);
+
+  bool operator == (const AddUniqueConstraintRequest & rhs) const
+  {
+    if (!(uniqueConstraintCols == rhs.uniqueConstraintCols))
+      return false;
+    return true;
+  }
+  bool operator != (const AddUniqueConstraintRequest &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const AddUniqueConstraintRequest & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(AddUniqueConstraintRequest &a, AddUniqueConstraintRequest &b);
+
+inline std::ostream& operator<<(std::ostream& out, const AddUniqueConstraintRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
+class AddNotNullConstraintRequest {
+ public:
+
+  AddNotNullConstraintRequest(const AddNotNullConstraintRequest&);
+  AddNotNullConstraintRequest& operator=(const AddNotNullConstraintRequest&);
+  AddNotNullConstraintRequest() {
+  }
+
+  virtual ~AddNotNullConstraintRequest() throw();
+  std::vector<SQLNotNullConstraint>  notNullConstraintCols;
+
+  void __set_notNullConstraintCols(const std::vector<SQLNotNullConstraint> & val);
+
+  bool operator == (const AddNotNullConstraintRequest & rhs) const
+  {
+    if (!(notNullConstraintCols == rhs.notNullConstraintCols))
+      return false;
+    return true;
+  }
+  bool operator != (const AddNotNullConstraintRequest &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const AddNotNullConstraintRequest & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  virtual void printTo(std::ostream& out) const;
+};
+
+void swap(AddNotNullConstraintRequest &a, AddNotNullConstraintRequest &b);
+
+inline std::ostream& operator<<(std::ostream& out, const AddNotNullConstraintRequest& obj)
+{
+  obj.printTo(out);
+  return out;
+}
+
+
 class PartitionsByExprResult {
  public:
 

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnsRequest.java
index d89eb97..eee1e64 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AbortTxnsRequest.java
@@ -351,13 +351,13 @@ public class AbortTxnsRequest implements org.apache.thrift.TBase<AbortTxnsReques
           case 1: // TXN_IDS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list484 = iprot.readListBegin();
-                struct.txn_ids = new ArrayList<Long>(_list484.size);
-                long _elem485;
-                for (int _i486 = 0; _i486 < _list484.size; ++_i486)
+                org.apache.thrift.protocol.TList _list516 = iprot.readListBegin();
+                struct.txn_ids = new ArrayList<Long>(_list516.size);
+                long _elem517;
+                for (int _i518 = 0; _i518 < _list516.size; ++_i518)
                 {
-                  _elem485 = iprot.readI64();
-                  struct.txn_ids.add(_elem485);
+                  _elem517 = iprot.readI64();
+                  struct.txn_ids.add(_elem517);
                 }
                 iprot.readListEnd();
               }
@@ -383,9 +383,9 @@ public class AbortTxnsRequest implements org.apache.thrift.TBase<AbortTxnsReques
         oprot.writeFieldBegin(TXN_IDS_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.txn_ids.size()));
-          for (long _iter487 : struct.txn_ids)
+          for (long _iter519 : struct.txn_ids)
           {
-            oprot.writeI64(_iter487);
+            oprot.writeI64(_iter519);
           }
           oprot.writeListEnd();
         }
@@ -410,9 +410,9 @@ public class AbortTxnsRequest implements org.apache.thrift.TBase<AbortTxnsReques
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.txn_ids.size());
-        for (long _iter488 : struct.txn_ids)
+        for (long _iter520 : struct.txn_ids)
         {
-          oprot.writeI64(_iter488);
+          oprot.writeI64(_iter520);
         }
       }
     }
@@ -421,13 +421,13 @@ public class AbortTxnsRequest implements org.apache.thrift.TBase<AbortTxnsReques
     public void read(org.apache.thrift.protocol.TProtocol prot, AbortTxnsRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
-        org.apache.thrift.protocol.TList _list489 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
-        struct.txn_ids = new ArrayList<Long>(_list489.size);
-        long _elem490;
-        for (int _i491 = 0; _i491 < _list489.size; ++_i491)
+        org.apache.thrift.protocol.TList _list521 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
+        struct.txn_ids = new ArrayList<Long>(_list521.size);
+        long _elem522;
+        for (int _i523 = 0; _i523 < _list521.size; ++_i523)
         {
-          _elem490 = iprot.readI64();
-          struct.txn_ids.add(_elem490);
+          _elem522 = iprot.readI64();
+          struct.txn_ids.add(_elem522);
         }
       }
       struct.setTxn_idsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
index ba06a56..054cf1b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
@@ -727,13 +727,13 @@ public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicP
           case 4: // PARTITIONNAMES
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list542 = iprot.readListBegin();
-                struct.partitionnames = new ArrayList<String>(_list542.size);
-                String _elem543;
-                for (int _i544 = 0; _i544 < _list542.size; ++_i544)
+                org.apache.thrift.protocol.TList _list574 = iprot.readListBegin();
+                struct.partitionnames = new ArrayList<String>(_list574.size);
+                String _elem575;
+                for (int _i576 = 0; _i576 < _list574.size; ++_i576)
                 {
-                  _elem543 = iprot.readString();
-                  struct.partitionnames.add(_elem543);
+                  _elem575 = iprot.readString();
+                  struct.partitionnames.add(_elem575);
                 }
                 iprot.readListEnd();
               }
@@ -780,9 +780,9 @@ public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicP
         oprot.writeFieldBegin(PARTITIONNAMES_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.partitionnames.size()));
-          for (String _iter545 : struct.partitionnames)
+          for (String _iter577 : struct.partitionnames)
           {
-            oprot.writeString(_iter545);
+            oprot.writeString(_iter577);
           }
           oprot.writeListEnd();
         }
@@ -817,9 +817,9 @@ public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicP
       oprot.writeString(struct.tablename);
       {
         oprot.writeI32(struct.partitionnames.size());
-        for (String _iter546 : struct.partitionnames)
+        for (String _iter578 : struct.partitionnames)
         {
-          oprot.writeString(_iter546);
+          oprot.writeString(_iter578);
         }
       }
       BitSet optionals = new BitSet();
@@ -842,13 +842,13 @@ public class AddDynamicPartitions implements org.apache.thrift.TBase<AddDynamicP
       struct.tablename = iprot.readString();
       struct.setTablenameIsSet(true);
       {
-        org.apache.thrift.protocol.TList _list547 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
-        struct.partitionnames = new ArrayList<String>(_list547.size);
-        String _elem548;
-        for (int _i549 = 0; _i549 < _list547.size; ++_i549)
+        org.apache.thrift.protocol.TList _list579 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+        struct.partitionnames = new ArrayList<String>(_list579.size);
+        String _elem580;
+        for (int _i581 = 0; _i581 < _list579.size; ++_i581)
         {
-          _elem548 = iprot.readString();
-          struct.partitionnames.add(_elem548);
+          _elem580 = iprot.readString();
+          struct.partitionnames.add(_elem580);
         }
       }
       struct.setPartitionnamesIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddForeignKeyRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddForeignKeyRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddForeignKeyRequest.java
index 43f7ca7..3123787 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddForeignKeyRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddForeignKeyRequest.java
@@ -354,14 +354,14 @@ public class AddForeignKeyRequest implements org.apache.thrift.TBase<AddForeignK
           case 1: // FOREIGN_KEY_COLS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list346 = iprot.readListBegin();
-                struct.foreignKeyCols = new ArrayList<SQLForeignKey>(_list346.size);
-                SQLForeignKey _elem347;
-                for (int _i348 = 0; _i348 < _list346.size; ++_i348)
+                org.apache.thrift.protocol.TList _list362 = iprot.readListBegin();
+                struct.foreignKeyCols = new ArrayList<SQLForeignKey>(_list362.size);
+                SQLForeignKey _elem363;
+                for (int _i364 = 0; _i364 < _list362.size; ++_i364)
                 {
-                  _elem347 = new SQLForeignKey();
-                  _elem347.read(iprot);
-                  struct.foreignKeyCols.add(_elem347);
+                  _elem363 = new SQLForeignKey();
+                  _elem363.read(iprot);
+                  struct.foreignKeyCols.add(_elem363);
                 }
                 iprot.readListEnd();
               }
@@ -387,9 +387,9 @@ public class AddForeignKeyRequest implements org.apache.thrift.TBase<AddForeignK
         oprot.writeFieldBegin(FOREIGN_KEY_COLS_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.foreignKeyCols.size()));
-          for (SQLForeignKey _iter349 : struct.foreignKeyCols)
+          for (SQLForeignKey _iter365 : struct.foreignKeyCols)
           {
-            _iter349.write(oprot);
+            _iter365.write(oprot);
           }
           oprot.writeListEnd();
         }
@@ -414,9 +414,9 @@ public class AddForeignKeyRequest implements org.apache.thrift.TBase<AddForeignK
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.foreignKeyCols.size());
-        for (SQLForeignKey _iter350 : struct.foreignKeyCols)
+        for (SQLForeignKey _iter366 : struct.foreignKeyCols)
         {
-          _iter350.write(oprot);
+          _iter366.write(oprot);
         }
       }
     }
@@ -425,14 +425,14 @@ public class AddForeignKeyRequest implements org.apache.thrift.TBase<AddForeignK
     public void read(org.apache.thrift.protocol.TProtocol prot, AddForeignKeyRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
-        org.apache.thrift.protocol.TList _list351 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-        struct.foreignKeyCols = new ArrayList<SQLForeignKey>(_list351.size);
-        SQLForeignKey _elem352;
-        for (int _i353 = 0; _i353 < _list351.size; ++_i353)
+        org.apache.thrift.protocol.TList _list367 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+        struct.foreignKeyCols = new ArrayList<SQLForeignKey>(_list367.size);
+        SQLForeignKey _elem368;
+        for (int _i369 = 0; _i369 < _list367.size; ++_i369)
         {
-          _elem352 = new SQLForeignKey();
-          _elem352.read(iprot);
-          struct.foreignKeyCols.add(_elem352);
+          _elem368 = new SQLForeignKey();
+          _elem368.read(iprot);
+          struct.foreignKeyCols.add(_elem368);
         }
       }
       struct.setForeignKeyColsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddNotNullConstraintRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddNotNullConstraintRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddNotNullConstraintRequest.java
new file mode 100644
index 0000000..3b79e98
--- /dev/null
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddNotNullConstraintRequest.java
@@ -0,0 +1,443 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+public class AddNotNullConstraintRequest implements org.apache.thrift.TBase<AddNotNullConstraintRequest, AddNotNullConstraintRequest._Fields>, java.io.Serializable, Cloneable, Comparable<AddNotNullConstraintRequest> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddNotNullConstraintRequest");
+
+  private static final org.apache.thrift.protocol.TField NOT_NULL_CONSTRAINT_COLS_FIELD_DESC = new org.apache.thrift.protocol.TField("notNullConstraintCols", org.apache.thrift.protocol.TType.LIST, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new AddNotNullConstraintRequestStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new AddNotNullConstraintRequestTupleSchemeFactory());
+  }
+
+  private List<SQLNotNullConstraint> notNullConstraintCols; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    NOT_NULL_CONSTRAINT_COLS((short)1, "notNullConstraintCols");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // NOT_NULL_CONSTRAINT_COLS
+          return NOT_NULL_CONSTRAINT_COLS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.NOT_NULL_CONSTRAINT_COLS, new org.apache.thrift.meta_data.FieldMetaData("notNullConstraintCols", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SQLNotNullConstraint.class))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AddNotNullConstraintRequest.class, metaDataMap);
+  }
+
+  public AddNotNullConstraintRequest() {
+  }
+
+  public AddNotNullConstraintRequest(
+    List<SQLNotNullConstraint> notNullConstraintCols)
+  {
+    this();
+    this.notNullConstraintCols = notNullConstraintCols;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public AddNotNullConstraintRequest(AddNotNullConstraintRequest other) {
+    if (other.isSetNotNullConstraintCols()) {
+      List<SQLNotNullConstraint> __this__notNullConstraintCols = new ArrayList<SQLNotNullConstraint>(other.notNullConstraintCols.size());
+      for (SQLNotNullConstraint other_element : other.notNullConstraintCols) {
+        __this__notNullConstraintCols.add(new SQLNotNullConstraint(other_element));
+      }
+      this.notNullConstraintCols = __this__notNullConstraintCols;
+    }
+  }
+
+  public AddNotNullConstraintRequest deepCopy() {
+    return new AddNotNullConstraintRequest(this);
+  }
+
+  @Override
+  public void clear() {
+    this.notNullConstraintCols = null;
+  }
+
+  public int getNotNullConstraintColsSize() {
+    return (this.notNullConstraintCols == null) ? 0 : this.notNullConstraintCols.size();
+  }
+
+  public java.util.Iterator<SQLNotNullConstraint> getNotNullConstraintColsIterator() {
+    return (this.notNullConstraintCols == null) ? null : this.notNullConstraintCols.iterator();
+  }
+
+  public void addToNotNullConstraintCols(SQLNotNullConstraint elem) {
+    if (this.notNullConstraintCols == null) {
+      this.notNullConstraintCols = new ArrayList<SQLNotNullConstraint>();
+    }
+    this.notNullConstraintCols.add(elem);
+  }
+
+  public List<SQLNotNullConstraint> getNotNullConstraintCols() {
+    return this.notNullConstraintCols;
+  }
+
+  public void setNotNullConstraintCols(List<SQLNotNullConstraint> notNullConstraintCols) {
+    this.notNullConstraintCols = notNullConstraintCols;
+  }
+
+  public void unsetNotNullConstraintCols() {
+    this.notNullConstraintCols = null;
+  }
+
+  /** Returns true if field notNullConstraintCols is set (has been assigned a value) and false otherwise */
+  public boolean isSetNotNullConstraintCols() {
+    return this.notNullConstraintCols != null;
+  }
+
+  public void setNotNullConstraintColsIsSet(boolean value) {
+    if (!value) {
+      this.notNullConstraintCols = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case NOT_NULL_CONSTRAINT_COLS:
+      if (value == null) {
+        unsetNotNullConstraintCols();
+      } else {
+        setNotNullConstraintCols((List<SQLNotNullConstraint>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case NOT_NULL_CONSTRAINT_COLS:
+      return getNotNullConstraintCols();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case NOT_NULL_CONSTRAINT_COLS:
+      return isSetNotNullConstraintCols();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof AddNotNullConstraintRequest)
+      return this.equals((AddNotNullConstraintRequest)that);
+    return false;
+  }
+
+  public boolean equals(AddNotNullConstraintRequest that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_notNullConstraintCols = true && this.isSetNotNullConstraintCols();
+    boolean that_present_notNullConstraintCols = true && that.isSetNotNullConstraintCols();
+    if (this_present_notNullConstraintCols || that_present_notNullConstraintCols) {
+      if (!(this_present_notNullConstraintCols && that_present_notNullConstraintCols))
+        return false;
+      if (!this.notNullConstraintCols.equals(that.notNullConstraintCols))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_notNullConstraintCols = true && (isSetNotNullConstraintCols());
+    list.add(present_notNullConstraintCols);
+    if (present_notNullConstraintCols)
+      list.add(notNullConstraintCols);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(AddNotNullConstraintRequest other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(isSetNotNullConstraintCols()).compareTo(other.isSetNotNullConstraintCols());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetNotNullConstraintCols()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.notNullConstraintCols, other.notNullConstraintCols);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("AddNotNullConstraintRequest(");
+    boolean first = true;
+
+    sb.append("notNullConstraintCols:");
+    if (this.notNullConstraintCols == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.notNullConstraintCols);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetNotNullConstraintCols()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'notNullConstraintCols' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class AddNotNullConstraintRequestStandardSchemeFactory implements SchemeFactory {
+    public AddNotNullConstraintRequestStandardScheme getScheme() {
+      return new AddNotNullConstraintRequestStandardScheme();
+    }
+  }
+
+  private static class AddNotNullConstraintRequestStandardScheme extends StandardScheme<AddNotNullConstraintRequest> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, AddNotNullConstraintRequest struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // NOT_NULL_CONSTRAINT_COLS
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list378 = iprot.readListBegin();
+                struct.notNullConstraintCols = new ArrayList<SQLNotNullConstraint>(_list378.size);
+                SQLNotNullConstraint _elem379;
+                for (int _i380 = 0; _i380 < _list378.size; ++_i380)
+                {
+                  _elem379 = new SQLNotNullConstraint();
+                  _elem379.read(iprot);
+                  struct.notNullConstraintCols.add(_elem379);
+                }
+                iprot.readListEnd();
+              }
+              struct.setNotNullConstraintColsIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, AddNotNullConstraintRequest struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.notNullConstraintCols != null) {
+        oprot.writeFieldBegin(NOT_NULL_CONSTRAINT_COLS_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.notNullConstraintCols.size()));
+          for (SQLNotNullConstraint _iter381 : struct.notNullConstraintCols)
+          {
+            _iter381.write(oprot);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class AddNotNullConstraintRequestTupleSchemeFactory implements SchemeFactory {
+    public AddNotNullConstraintRequestTupleScheme getScheme() {
+      return new AddNotNullConstraintRequestTupleScheme();
+    }
+  }
+
+  private static class AddNotNullConstraintRequestTupleScheme extends TupleScheme<AddNotNullConstraintRequest> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, AddNotNullConstraintRequest struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      {
+        oprot.writeI32(struct.notNullConstraintCols.size());
+        for (SQLNotNullConstraint _iter382 : struct.notNullConstraintCols)
+        {
+          _iter382.write(oprot);
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, AddNotNullConstraintRequest struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      {
+        org.apache.thrift.protocol.TList _list383 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+        struct.notNullConstraintCols = new ArrayList<SQLNotNullConstraint>(_list383.size);
+        SQLNotNullConstraint _elem384;
+        for (int _i385 = 0; _i385 < _list383.size; ++_i385)
+        {
+          _elem384 = new SQLNotNullConstraint();
+          _elem384.read(iprot);
+          struct.notNullConstraintCols.add(_elem384);
+        }
+      }
+      struct.setNotNullConstraintColsIsSet(true);
+    }
+  }
+
+}
+
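
The generated class above ships both a standard and a tuple scheme, and its Java serialization hooks delegate to the compact protocol. A small round-trip sketch using the stock Thrift serializer classes (the constraint contents are placeholders, and the SQLNotNullConstraint setter names are assumed from the usual Thrift bean conventions):

    import java.util.Arrays;
    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TCompactProtocol;
    import org.apache.hadoop.hive.metastore.api.AddNotNullConstraintRequest;
    import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;

    public class NotNullRoundTripSketch {
      public static void main(String[] args) throws Exception {
        SQLNotNullConstraint nn = new SQLNotNullConstraint();
        nn.setTable_db("default");
        nn.setTable_name("t1");
        nn.setColumn_name("name");
        nn.setNn_name("nn_t1_name");

        AddNotNullConstraintRequest original = new AddNotNullConstraintRequest(Arrays.asList(nn));

        // Serialize with the compact protocol, then read the bytes back into a fresh bean.
        byte[] wire = new TSerializer(new TCompactProtocol.Factory()).serialize(original);
        AddNotNullConstraintRequest copy = new AddNotNullConstraintRequest();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, wire);

        // equals() compares the required notNullConstraintCols list, so this prints true.
        System.out.println(original.equals(copy));
      }
    }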

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
index da23f72..6e9ac48 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
@@ -704,14 +704,14 @@ public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitio
           case 3: // PARTS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list420 = iprot.readListBegin();
-                struct.parts = new ArrayList<Partition>(_list420.size);
-                Partition _elem421;
-                for (int _i422 = 0; _i422 < _list420.size; ++_i422)
+                org.apache.thrift.protocol.TList _list452 = iprot.readListBegin();
+                struct.parts = new ArrayList<Partition>(_list452.size);
+                Partition _elem453;
+                for (int _i454 = 0; _i454 < _list452.size; ++_i454)
                 {
-                  _elem421 = new Partition();
-                  _elem421.read(iprot);
-                  struct.parts.add(_elem421);
+                  _elem453 = new Partition();
+                  _elem453.read(iprot);
+                  struct.parts.add(_elem453);
                 }
                 iprot.readListEnd();
               }
@@ -763,9 +763,9 @@ public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitio
         oprot.writeFieldBegin(PARTS_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.parts.size()));
-          for (Partition _iter423 : struct.parts)
+          for (Partition _iter455 : struct.parts)
           {
-            _iter423.write(oprot);
+            _iter455.write(oprot);
           }
           oprot.writeListEnd();
         }
@@ -800,9 +800,9 @@ public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitio
       oprot.writeString(struct.tblName);
       {
         oprot.writeI32(struct.parts.size());
-        for (Partition _iter424 : struct.parts)
+        for (Partition _iter456 : struct.parts)
         {
-          _iter424.write(oprot);
+          _iter456.write(oprot);
         }
       }
       oprot.writeBool(struct.ifNotExists);
@@ -824,14 +824,14 @@ public class AddPartitionsRequest implements org.apache.thrift.TBase<AddPartitio
       struct.tblName = iprot.readString();
       struct.setTblNameIsSet(true);
       {
-        org.apache.thrift.protocol.TList _list425 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-        struct.parts = new ArrayList<Partition>(_list425.size);
-        Partition _elem426;
-        for (int _i427 = 0; _i427 < _list425.size; ++_i427)
+        org.apache.thrift.protocol.TList _list457 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+        struct.parts = new ArrayList<Partition>(_list457.size);
+        Partition _elem458;
+        for (int _i459 = 0; _i459 < _list457.size; ++_i459)
         {
-          _elem426 = new Partition();
-          _elem426.read(iprot);
-          struct.parts.add(_elem426);
+          _elem458 = new Partition();
+          _elem458.read(iprot);
+          struct.parts.add(_elem458);
         }
       }
       struct.setPartsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
index bfd483e..0dfed78 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
@@ -346,14 +346,14 @@ public class AddPartitionsResult implements org.apache.thrift.TBase<AddPartition
           case 1: // PARTITIONS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list412 = iprot.readListBegin();
-                struct.partitions = new ArrayList<Partition>(_list412.size);
-                Partition _elem413;
-                for (int _i414 = 0; _i414 < _list412.size; ++_i414)
+                org.apache.thrift.protocol.TList _list444 = iprot.readListBegin();
+                struct.partitions = new ArrayList<Partition>(_list444.size);
+                Partition _elem445;
+                for (int _i446 = 0; _i446 < _list444.size; ++_i446)
                 {
-                  _elem413 = new Partition();
-                  _elem413.read(iprot);
-                  struct.partitions.add(_elem413);
+                  _elem445 = new Partition();
+                  _elem445.read(iprot);
+                  struct.partitions.add(_elem445);
                 }
                 iprot.readListEnd();
               }
@@ -380,9 +380,9 @@ public class AddPartitionsResult implements org.apache.thrift.TBase<AddPartition
           oprot.writeFieldBegin(PARTITIONS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.partitions.size()));
-            for (Partition _iter415 : struct.partitions)
+            for (Partition _iter447 : struct.partitions)
             {
-              _iter415.write(oprot);
+              _iter447.write(oprot);
             }
             oprot.writeListEnd();
           }
@@ -414,9 +414,9 @@ public class AddPartitionsResult implements org.apache.thrift.TBase<AddPartition
       if (struct.isSetPartitions()) {
         {
           oprot.writeI32(struct.partitions.size());
-          for (Partition _iter416 : struct.partitions)
+          for (Partition _iter448 : struct.partitions)
           {
-            _iter416.write(oprot);
+            _iter448.write(oprot);
           }
         }
       }
@@ -428,14 +428,14 @@ public class AddPartitionsResult implements org.apache.thrift.TBase<AddPartition
       BitSet incoming = iprot.readBitSet(1);
       if (incoming.get(0)) {
         {
-          org.apache.thrift.protocol.TList _list417 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.partitions = new ArrayList<Partition>(_list417.size);
-          Partition _elem418;
-          for (int _i419 = 0; _i419 < _list417.size; ++_i419)
+          org.apache.thrift.protocol.TList _list449 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.partitions = new ArrayList<Partition>(_list449.size);
+          Partition _elem450;
+          for (int _i451 = 0; _i451 < _list449.size; ++_i451)
           {
-            _elem418 = new Partition();
-            _elem418.read(iprot);
-            struct.partitions.add(_elem418);
+            _elem450 = new Partition();
+            _elem450.read(iprot);
+            struct.partitions.add(_elem450);
           }
         }
         struct.setPartitionsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPrimaryKeyRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPrimaryKeyRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPrimaryKeyRequest.java
index 987b031..55e606d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPrimaryKeyRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPrimaryKeyRequest.java
@@ -354,14 +354,14 @@ public class AddPrimaryKeyRequest implements org.apache.thrift.TBase<AddPrimaryK
           case 1: // PRIMARY_KEY_COLS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list338 = iprot.readListBegin();
-                struct.primaryKeyCols = new ArrayList<SQLPrimaryKey>(_list338.size);
-                SQLPrimaryKey _elem339;
-                for (int _i340 = 0; _i340 < _list338.size; ++_i340)
+                org.apache.thrift.protocol.TList _list354 = iprot.readListBegin();
+                struct.primaryKeyCols = new ArrayList<SQLPrimaryKey>(_list354.size);
+                SQLPrimaryKey _elem355;
+                for (int _i356 = 0; _i356 < _list354.size; ++_i356)
                 {
-                  _elem339 = new SQLPrimaryKey();
-                  _elem339.read(iprot);
-                  struct.primaryKeyCols.add(_elem339);
+                  _elem355 = new SQLPrimaryKey();
+                  _elem355.read(iprot);
+                  struct.primaryKeyCols.add(_elem355);
                 }
                 iprot.readListEnd();
               }
@@ -387,9 +387,9 @@ public class AddPrimaryKeyRequest implements org.apache.thrift.TBase<AddPrimaryK
         oprot.writeFieldBegin(PRIMARY_KEY_COLS_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.primaryKeyCols.size()));
-          for (SQLPrimaryKey _iter341 : struct.primaryKeyCols)
+          for (SQLPrimaryKey _iter357 : struct.primaryKeyCols)
           {
-            _iter341.write(oprot);
+            _iter357.write(oprot);
           }
           oprot.writeListEnd();
         }
@@ -414,9 +414,9 @@ public class AddPrimaryKeyRequest implements org.apache.thrift.TBase<AddPrimaryK
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.primaryKeyCols.size());
-        for (SQLPrimaryKey _iter342 : struct.primaryKeyCols)
+        for (SQLPrimaryKey _iter358 : struct.primaryKeyCols)
         {
-          _iter342.write(oprot);
+          _iter358.write(oprot);
         }
       }
     }
@@ -425,14 +425,14 @@ public class AddPrimaryKeyRequest implements org.apache.thrift.TBase<AddPrimaryK
     public void read(org.apache.thrift.protocol.TProtocol prot, AddPrimaryKeyRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
-        org.apache.thrift.protocol.TList _list343 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-        struct.primaryKeyCols = new ArrayList<SQLPrimaryKey>(_list343.size);
-        SQLPrimaryKey _elem344;
-        for (int _i345 = 0; _i345 < _list343.size; ++_i345)
+        org.apache.thrift.protocol.TList _list359 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+        struct.primaryKeyCols = new ArrayList<SQLPrimaryKey>(_list359.size);
+        SQLPrimaryKey _elem360;
+        for (int _i361 = 0; _i361 < _list359.size; ++_i361)
         {
-          _elem344 = new SQLPrimaryKey();
-          _elem344.read(iprot);
-          struct.primaryKeyCols.add(_elem344);
+          _elem360 = new SQLPrimaryKey();
+          _elem360.read(iprot);
+          struct.primaryKeyCols.add(_elem360);
         }
       }
       struct.setPrimaryKeyColsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddUniqueConstraintRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddUniqueConstraintRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddUniqueConstraintRequest.java
new file mode 100644
index 0000000..71d1451
--- /dev/null
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddUniqueConstraintRequest.java
@@ -0,0 +1,443 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+public class AddUniqueConstraintRequest implements org.apache.thrift.TBase<AddUniqueConstraintRequest, AddUniqueConstraintRequest._Fields>, java.io.Serializable, Cloneable, Comparable<AddUniqueConstraintRequest> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AddUniqueConstraintRequest");
+
+  private static final org.apache.thrift.protocol.TField UNIQUE_CONSTRAINT_COLS_FIELD_DESC = new org.apache.thrift.protocol.TField("uniqueConstraintCols", org.apache.thrift.protocol.TType.LIST, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new AddUniqueConstraintRequestStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new AddUniqueConstraintRequestTupleSchemeFactory());
+  }
+
+  private List<SQLUniqueConstraint> uniqueConstraintCols; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    UNIQUE_CONSTRAINT_COLS((short)1, "uniqueConstraintCols");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // UNIQUE_CONSTRAINT_COLS
+          return UNIQUE_CONSTRAINT_COLS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.UNIQUE_CONSTRAINT_COLS, new org.apache.thrift.meta_data.FieldMetaData("uniqueConstraintCols", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SQLUniqueConstraint.class))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AddUniqueConstraintRequest.class, metaDataMap);
+  }
+
+  public AddUniqueConstraintRequest() {
+  }
+
+  public AddUniqueConstraintRequest(
+    List<SQLUniqueConstraint> uniqueConstraintCols)
+  {
+    this();
+    this.uniqueConstraintCols = uniqueConstraintCols;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public AddUniqueConstraintRequest(AddUniqueConstraintRequest other) {
+    if (other.isSetUniqueConstraintCols()) {
+      List<SQLUniqueConstraint> __this__uniqueConstraintCols = new ArrayList<SQLUniqueConstraint>(other.uniqueConstraintCols.size());
+      for (SQLUniqueConstraint other_element : other.uniqueConstraintCols) {
+        __this__uniqueConstraintCols.add(new SQLUniqueConstraint(other_element));
+      }
+      this.uniqueConstraintCols = __this__uniqueConstraintCols;
+    }
+  }
+
+  public AddUniqueConstraintRequest deepCopy() {
+    return new AddUniqueConstraintRequest(this);
+  }
+
+  @Override
+  public void clear() {
+    this.uniqueConstraintCols = null;
+  }
+
+  public int getUniqueConstraintColsSize() {
+    return (this.uniqueConstraintCols == null) ? 0 : this.uniqueConstraintCols.size();
+  }
+
+  public java.util.Iterator<SQLUniqueConstraint> getUniqueConstraintColsIterator() {
+    return (this.uniqueConstraintCols == null) ? null : this.uniqueConstraintCols.iterator();
+  }
+
+  public void addToUniqueConstraintCols(SQLUniqueConstraint elem) {
+    if (this.uniqueConstraintCols == null) {
+      this.uniqueConstraintCols = new ArrayList<SQLUniqueConstraint>();
+    }
+    this.uniqueConstraintCols.add(elem);
+  }
+
+  public List<SQLUniqueConstraint> getUniqueConstraintCols() {
+    return this.uniqueConstraintCols;
+  }
+
+  public void setUniqueConstraintCols(List<SQLUniqueConstraint> uniqueConstraintCols) {
+    this.uniqueConstraintCols = uniqueConstraintCols;
+  }
+
+  public void unsetUniqueConstraintCols() {
+    this.uniqueConstraintCols = null;
+  }
+
+  /** Returns true if field uniqueConstraintCols is set (has been assigned a value) and false otherwise */
+  public boolean isSetUniqueConstraintCols() {
+    return this.uniqueConstraintCols != null;
+  }
+
+  public void setUniqueConstraintColsIsSet(boolean value) {
+    if (!value) {
+      this.uniqueConstraintCols = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case UNIQUE_CONSTRAINT_COLS:
+      if (value == null) {
+        unsetUniqueConstraintCols();
+      } else {
+        setUniqueConstraintCols((List<SQLUniqueConstraint>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case UNIQUE_CONSTRAINT_COLS:
+      return getUniqueConstraintCols();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case UNIQUE_CONSTRAINT_COLS:
+      return isSetUniqueConstraintCols();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof AddUniqueConstraintRequest)
+      return this.equals((AddUniqueConstraintRequest)that);
+    return false;
+  }
+
+  public boolean equals(AddUniqueConstraintRequest that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_uniqueConstraintCols = true && this.isSetUniqueConstraintCols();
+    boolean that_present_uniqueConstraintCols = true && that.isSetUniqueConstraintCols();
+    if (this_present_uniqueConstraintCols || that_present_uniqueConstraintCols) {
+      if (!(this_present_uniqueConstraintCols && that_present_uniqueConstraintCols))
+        return false;
+      if (!this.uniqueConstraintCols.equals(that.uniqueConstraintCols))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_uniqueConstraintCols = true && (isSetUniqueConstraintCols());
+    list.add(present_uniqueConstraintCols);
+    if (present_uniqueConstraintCols)
+      list.add(uniqueConstraintCols);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(AddUniqueConstraintRequest other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(isSetUniqueConstraintCols()).compareTo(other.isSetUniqueConstraintCols());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetUniqueConstraintCols()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.uniqueConstraintCols, other.uniqueConstraintCols);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("AddUniqueConstraintRequest(");
+    boolean first = true;
+
+    sb.append("uniqueConstraintCols:");
+    if (this.uniqueConstraintCols == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.uniqueConstraintCols);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetUniqueConstraintCols()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'uniqueConstraintCols' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class AddUniqueConstraintRequestStandardSchemeFactory implements SchemeFactory {
+    public AddUniqueConstraintRequestStandardScheme getScheme() {
+      return new AddUniqueConstraintRequestStandardScheme();
+    }
+  }
+
+  private static class AddUniqueConstraintRequestStandardScheme extends StandardScheme<AddUniqueConstraintRequest> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, AddUniqueConstraintRequest struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // UNIQUE_CONSTRAINT_COLS
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list370 = iprot.readListBegin();
+                struct.uniqueConstraintCols = new ArrayList<SQLUniqueConstraint>(_list370.size);
+                SQLUniqueConstraint _elem371;
+                for (int _i372 = 0; _i372 < _list370.size; ++_i372)
+                {
+                  _elem371 = new SQLUniqueConstraint();
+                  _elem371.read(iprot);
+                  struct.uniqueConstraintCols.add(_elem371);
+                }
+                iprot.readListEnd();
+              }
+              struct.setUniqueConstraintColsIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, AddUniqueConstraintRequest struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.uniqueConstraintCols != null) {
+        oprot.writeFieldBegin(UNIQUE_CONSTRAINT_COLS_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.uniqueConstraintCols.size()));
+          for (SQLUniqueConstraint _iter373 : struct.uniqueConstraintCols)
+          {
+            _iter373.write(oprot);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class AddUniqueConstraintRequestTupleSchemeFactory implements SchemeFactory {
+    public AddUniqueConstraintRequestTupleScheme getScheme() {
+      return new AddUniqueConstraintRequestTupleScheme();
+    }
+  }
+
+  private static class AddUniqueConstraintRequestTupleScheme extends TupleScheme<AddUniqueConstraintRequest> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, AddUniqueConstraintRequest struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      {
+        oprot.writeI32(struct.uniqueConstraintCols.size());
+        for (SQLUniqueConstraint _iter374 : struct.uniqueConstraintCols)
+        {
+          _iter374.write(oprot);
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, AddUniqueConstraintRequest struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      {
+        org.apache.thrift.protocol.TList _list375 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+        struct.uniqueConstraintCols = new ArrayList<SQLUniqueConstraint>(_list375.size);
+        SQLUniqueConstraint _elem376;
+        for (int _i377 = 0; _i377 < _list375.size; ++_i377)
+        {
+          _elem376 = new SQLUniqueConstraint();
+          _elem376.read(iprot);
+          struct.uniqueConstraintCols.add(_elem376);
+        }
+      }
+      struct.setUniqueConstraintColsIsSet(true);
+    }
+  }
+
+}
+
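
A minimal usage sketch for the new bean, assuming the standard Thrift-generated setters on SQLUniqueConstraint (setTable_db, setColumn_name, ...) and purely illustrative table and column names:

    List<SQLUniqueConstraint> uks = new ArrayList<SQLUniqueConstraint>();
    SQLUniqueConstraint uk = new SQLUniqueConstraint();
    uk.setTable_db("default");        // illustrative values, not from the patch
    uk.setTable_name("customers");
    uk.setColumn_name("customer_id");
    uk.setKey_seq(1);                 // position of the column within the constraint
    uk.setUk_name("uk_customers_id");
    uk.setEnable_cstr(false);         // ENABLE/VALIDATE/RELY flags as declared in the DDL
    uk.setValidate_cstr(false);
    uk.setRely_cstr(true);
    uks.add(uk);

    AddUniqueConstraintRequest req = new AddUniqueConstraintRequest(uks);
    req.validate();                   // throws TProtocolException if the required list is unset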

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
index 0da09bf..0b6574d 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClearFileMetadataRequest.java
@@ -351,13 +351,13 @@ public class ClearFileMetadataRequest implements org.apache.thrift.TBase<ClearFi
           case 1: // FILE_IDS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list634 = iprot.readListBegin();
-                struct.fileIds = new ArrayList<Long>(_list634.size);
-                long _elem635;
-                for (int _i636 = 0; _i636 < _list634.size; ++_i636)
+                org.apache.thrift.protocol.TList _list666 = iprot.readListBegin();
+                struct.fileIds = new ArrayList<Long>(_list666.size);
+                long _elem667;
+                for (int _i668 = 0; _i668 < _list666.size; ++_i668)
                 {
-                  _elem635 = iprot.readI64();
-                  struct.fileIds.add(_elem635);
+                  _elem667 = iprot.readI64();
+                  struct.fileIds.add(_elem667);
                 }
                 iprot.readListEnd();
               }
@@ -383,9 +383,9 @@ public class ClearFileMetadataRequest implements org.apache.thrift.TBase<ClearFi
         oprot.writeFieldBegin(FILE_IDS_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.fileIds.size()));
-          for (long _iter637 : struct.fileIds)
+          for (long _iter669 : struct.fileIds)
           {
-            oprot.writeI64(_iter637);
+            oprot.writeI64(_iter669);
           }
           oprot.writeListEnd();
         }
@@ -410,9 +410,9 @@ public class ClearFileMetadataRequest implements org.apache.thrift.TBase<ClearFi
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.fileIds.size());
-        for (long _iter638 : struct.fileIds)
+        for (long _iter670 : struct.fileIds)
         {
-          oprot.writeI64(_iter638);
+          oprot.writeI64(_iter670);
         }
       }
     }
@@ -421,13 +421,13 @@ public class ClearFileMetadataRequest implements org.apache.thrift.TBase<ClearFi
     public void read(org.apache.thrift.protocol.TProtocol prot, ClearFileMetadataRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
-        org.apache.thrift.protocol.TList _list639 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
-        struct.fileIds = new ArrayList<Long>(_list639.size);
-        long _elem640;
-        for (int _i641 = 0; _i641 < _list639.size; ++_i641)
+        org.apache.thrift.protocol.TList _list671 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
+        struct.fileIds = new ArrayList<Long>(_list671.size);
+        long _elem672;
+        for (int _i673 = 0; _i673 < _list671.size; ++_i673)
         {
-          _elem640 = iprot.readI64();
-          struct.fileIds.add(_elem640);
+          _elem672 = iprot.readI64();
+          struct.fileIds.add(_elem672);
         }
       }
       struct.setFileIdsIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClientCapabilities.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClientCapabilities.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClientCapabilities.java
index 81534fe..19e671b 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClientCapabilities.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ClientCapabilities.java
@@ -354,13 +354,13 @@ public class ClientCapabilities implements org.apache.thrift.TBase<ClientCapabil
           case 1: // VALUES
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list650 = iprot.readListBegin();
-                struct.values = new ArrayList<ClientCapability>(_list650.size);
-                ClientCapability _elem651;
-                for (int _i652 = 0; _i652 < _list650.size; ++_i652)
+                org.apache.thrift.protocol.TList _list682 = iprot.readListBegin();
+                struct.values = new ArrayList<ClientCapability>(_list682.size);
+                ClientCapability _elem683;
+                for (int _i684 = 0; _i684 < _list682.size; ++_i684)
                 {
-                  _elem651 = org.apache.hadoop.hive.metastore.api.ClientCapability.findByValue(iprot.readI32());
-                  struct.values.add(_elem651);
+                  _elem683 = org.apache.hadoop.hive.metastore.api.ClientCapability.findByValue(iprot.readI32());
+                  struct.values.add(_elem683);
                 }
                 iprot.readListEnd();
               }
@@ -386,9 +386,9 @@ public class ClientCapabilities implements org.apache.thrift.TBase<ClientCapabil
         oprot.writeFieldBegin(VALUES_FIELD_DESC);
         {
           oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.values.size()));
-          for (ClientCapability _iter653 : struct.values)
+          for (ClientCapability _iter685 : struct.values)
           {
-            oprot.writeI32(_iter653.getValue());
+            oprot.writeI32(_iter685.getValue());
           }
           oprot.writeListEnd();
         }
@@ -413,9 +413,9 @@ public class ClientCapabilities implements org.apache.thrift.TBase<ClientCapabil
       TTupleProtocol oprot = (TTupleProtocol) prot;
       {
         oprot.writeI32(struct.values.size());
-        for (ClientCapability _iter654 : struct.values)
+        for (ClientCapability _iter686 : struct.values)
         {
-          oprot.writeI32(_iter654.getValue());
+          oprot.writeI32(_iter686.getValue());
         }
       }
     }
@@ -424,13 +424,13 @@ public class ClientCapabilities implements org.apache.thrift.TBase<ClientCapabil
     public void read(org.apache.thrift.protocol.TProtocol prot, ClientCapabilities struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       {
-        org.apache.thrift.protocol.TList _list655 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
-        struct.values = new ArrayList<ClientCapability>(_list655.size);
-        ClientCapability _elem656;
-        for (int _i657 = 0; _i657 < _list655.size; ++_i657)
+        org.apache.thrift.protocol.TList _list687 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
+        struct.values = new ArrayList<ClientCapability>(_list687.size);
+        ClientCapability _elem688;
+        for (int _i689 = 0; _i689 < _list687.size; ++_i689)
         {
-          _elem656 = org.apache.hadoop.hive.metastore.api.ClientCapability.findByValue(iprot.readI32());
-          struct.values.add(_elem656);
+          _elem688 = org.apache.hadoop.hive.metastore.api.ClientCapability.findByValue(iprot.readI32());
+          struct.values.add(_elem688);
         }
       }
       struct.setValuesIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
index d3fc92a..3acb203 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CompactionRequest.java
@@ -814,15 +814,15 @@ public class CompactionRequest implements org.apache.thrift.TBase<CompactionRequ
           case 6: // PROPERTIES
             if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
               {
-                org.apache.thrift.protocol.TMap _map524 = iprot.readMapBegin();
-                struct.properties = new HashMap<String,String>(2*_map524.size);
-                String _key525;
-                String _val526;
-                for (int _i527 = 0; _i527 < _map524.size; ++_i527)
+                org.apache.thrift.protocol.TMap _map556 = iprot.readMapBegin();
+                struct.properties = new HashMap<String,String>(2*_map556.size);
+                String _key557;
+                String _val558;
+                for (int _i559 = 0; _i559 < _map556.size; ++_i559)
                 {
-                  _key525 = iprot.readString();
-                  _val526 = iprot.readString();
-                  struct.properties.put(_key525, _val526);
+                  _key557 = iprot.readString();
+                  _val558 = iprot.readString();
+                  struct.properties.put(_key557, _val558);
                 }
                 iprot.readMapEnd();
               }
@@ -878,10 +878,10 @@ public class CompactionRequest implements org.apache.thrift.TBase<CompactionRequ
           oprot.writeFieldBegin(PROPERTIES_FIELD_DESC);
           {
             oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.properties.size()));
-            for (Map.Entry<String, String> _iter528 : struct.properties.entrySet())
+            for (Map.Entry<String, String> _iter560 : struct.properties.entrySet())
             {
-              oprot.writeString(_iter528.getKey());
-              oprot.writeString(_iter528.getValue());
+              oprot.writeString(_iter560.getKey());
+              oprot.writeString(_iter560.getValue());
             }
             oprot.writeMapEnd();
           }
@@ -928,10 +928,10 @@ public class CompactionRequest implements org.apache.thrift.TBase<CompactionRequ
       if (struct.isSetProperties()) {
         {
           oprot.writeI32(struct.properties.size());
-          for (Map.Entry<String, String> _iter529 : struct.properties.entrySet())
+          for (Map.Entry<String, String> _iter561 : struct.properties.entrySet())
           {
-            oprot.writeString(_iter529.getKey());
-            oprot.writeString(_iter529.getValue());
+            oprot.writeString(_iter561.getKey());
+            oprot.writeString(_iter561.getValue());
           }
         }
       }
@@ -957,15 +957,15 @@ public class CompactionRequest implements org.apache.thrift.TBase<CompactionRequ
       }
       if (incoming.get(2)) {
         {
-          org.apache.thrift.protocol.TMap _map530 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
-          struct.properties = new HashMap<String,String>(2*_map530.size);
-          String _key531;
-          String _val532;
-          for (int _i533 = 0; _i533 < _map530.size; ++_i533)
+          org.apache.thrift.protocol.TMap _map562 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+          struct.properties = new HashMap<String,String>(2*_map562.size);
+          String _key563;
+          String _val564;
+          for (int _i565 = 0; _i565 < _map562.size; ++_i565)
           {
-            _key531 = iprot.readString();
-            _val532 = iprot.readString();
-            struct.properties.put(_key531, _val532);
+            _key563 = iprot.readString();
+            _val564 = iprot.readString();
+            struct.properties.put(_key563, _val564);
           }
         }
         struct.setPropertiesIsSet(true);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
index 96cfbd2..21290ee 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
@@ -346,14 +346,14 @@ public class DropPartitionsResult implements org.apache.thrift.TBase<DropPartiti
           case 1: // PARTITIONS
             if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
               {
-                org.apache.thrift.protocol.TList _list428 = iprot.readListBegin();
-                struct.partitions = new ArrayList<Partition>(_list428.size);
-                Partition _elem429;
-                for (int _i430 = 0; _i430 < _list428.size; ++_i430)
+                org.apache.thrift.protocol.TList _list460 = iprot.readListBegin();
+                struct.partitions = new ArrayList<Partition>(_list460.size);
+                Partition _elem461;
+                for (int _i462 = 0; _i462 < _list460.size; ++_i462)
                 {
-                  _elem429 = new Partition();
-                  _elem429.read(iprot);
-                  struct.partitions.add(_elem429);
+                  _elem461 = new Partition();
+                  _elem461.read(iprot);
+                  struct.partitions.add(_elem461);
                 }
                 iprot.readListEnd();
               }
@@ -380,9 +380,9 @@ public class DropPartitionsResult implements org.apache.thrift.TBase<DropPartiti
           oprot.writeFieldBegin(PARTITIONS_FIELD_DESC);
           {
             oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.partitions.size()));
-            for (Partition _iter431 : struct.partitions)
+            for (Partition _iter463 : struct.partitions)
             {
-              _iter431.write(oprot);
+              _iter463.write(oprot);
             }
             oprot.writeListEnd();
           }
@@ -414,9 +414,9 @@ public class DropPartitionsResult implements org.apache.thrift.TBase<DropPartiti
       if (struct.isSetPartitions()) {
         {
           oprot.writeI32(struct.partitions.size());
-          for (Partition _iter432 : struct.partitions)
+          for (Partition _iter464 : struct.partitions)
           {
-            _iter432.write(oprot);
+            _iter464.write(oprot);
           }
         }
       }
@@ -428,14 +428,14 @@ public class DropPartitionsResult implements org.apache.thrift.TBase<DropPartiti
       BitSet incoming = iprot.readBitSet(1);
       if (incoming.get(0)) {
         {
-          org.apache.thrift.protocol.TList _list433 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.partitions = new ArrayList<Partition>(_list433.size);
-          Partition _elem434;
-          for (int _i435 = 0; _i435 < _list433.size; ++_i435)
+          org.apache.thrift.protocol.TList _list465 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.partitions = new ArrayList<Partition>(_list465.size);
+          Partition _elem466;
+          for (int _i467 = 0; _i467 < _list465.size; ++_i467)
           {
-            _elem434 = new Partition();
-            _elem434.read(iprot);
-            struct.partitions.add(_elem434);
+            _elem466 = new Partition();
+            _elem466.read(iprot);
+            struct.partitions.add(_elem466);
           }
         }
         struct.setPartitionsIsSet(true);