Posted to commits@hive.apache.org by jc...@apache.org on 2017/05/25 10:29:00 UTC

[05/21] hive git commit: HIVE-16575: Support for 'UNIQUE' and 'NOT NULL' constraints (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index f1aa9a6..2cf38b5 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -234,6 +234,64 @@ class SQLForeignKey
   ::Thrift::Struct.generate_accessors self
 end
 
+class SQLUniqueConstraint
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  TABLE_DB = 1
+  TABLE_NAME = 2
+  COLUMN_NAME = 3
+  KEY_SEQ = 4
+  UK_NAME = 5
+  ENABLE_CSTR = 6
+  VALIDATE_CSTR = 7
+  RELY_CSTR = 8
+
+  FIELDS = {
+    TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
+    TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
+    COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
+    KEY_SEQ => {:type => ::Thrift::Types::I32, :name => 'key_seq'},
+    UK_NAME => {:type => ::Thrift::Types::STRING, :name => 'uk_name'},
+    ENABLE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'enable_cstr'},
+    VALIDATE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'validate_cstr'},
+    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class SQLNotNullConstraint
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  TABLE_DB = 1
+  TABLE_NAME = 2
+  COLUMN_NAME = 3
+  NN_NAME = 4
+  ENABLE_CSTR = 5
+  VALIDATE_CSTR = 6
+  RELY_CSTR = 7
+
+  FIELDS = {
+    TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
+    TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
+    COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
+    NN_NAME => {:type => ::Thrift::Types::STRING, :name => 'nn_name'},
+    ENABLE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'enable_cstr'},
+    VALIDATE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'validate_cstr'},
+    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class Type
   include ::Thrift::Struct, ::Thrift::Struct_Union
   NAME = 1
@@ -1407,6 +1465,80 @@ class ForeignKeysResponse
   ::Thrift::Struct.generate_accessors self
 end
 
+class UniqueConstraintsRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  DB_NAME = 1
+  TBL_NAME = 2
+
+  FIELDS = {
+    DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+    TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class UniqueConstraintsResponse
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  UNIQUECONSTRAINTS = 1
+
+  FIELDS = {
+    UNIQUECONSTRAINTS => {:type => ::Thrift::Types::LIST, :name => 'uniqueConstraints', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLUniqueConstraint}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field uniqueConstraints is unset!') unless @uniqueConstraints
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class NotNullConstraintsRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  DB_NAME = 1
+  TBL_NAME = 2
+
+  FIELDS = {
+    DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+    TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class NotNullConstraintsResponse
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NOTNULLCONSTRAINTS = 1
+
+  FIELDS = {
+    NOTNULLCONSTRAINTS => {:type => ::Thrift::Types::LIST, :name => 'notNullConstraints', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLNotNullConstraint}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field notNullConstraints is unset!') unless @notNullConstraints
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class DropConstraintRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
   DBNAME = 1
@@ -1464,6 +1596,40 @@ class AddForeignKeyRequest
   ::Thrift::Struct.generate_accessors self
 end
 
+class AddUniqueConstraintRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  UNIQUECONSTRAINTCOLS = 1
+
+  FIELDS = {
+    UNIQUECONSTRAINTCOLS => {:type => ::Thrift::Types::LIST, :name => 'uniqueConstraintCols', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLUniqueConstraint}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field uniqueConstraintCols is unset!') unless @uniqueConstraintCols
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class AddNotNullConstraintRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NOTNULLCONSTRAINTCOLS = 1
+
+  FIELDS = {
+    NOTNULLCONSTRAINTCOLS => {:type => ::Thrift::Types::LIST, :name => 'notNullConstraintCols', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLNotNullConstraint}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field notNullConstraintCols is unset!') unless @notNullConstraintCols
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class PartitionsByExprResult
   include ::Thrift::Struct, ::Thrift::Struct_Union
   PARTITIONS = 1

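As context for the generated structs above: a minimal Java sketch, assuming the matching org.apache.hadoop.hive.metastore.api classes that this change also generates, of how the two new constraint structs might be populated. The field order mirrors the Ruby definitions; the database, table, and column names are illustrative only.

import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

public class ConstraintStructsSketch {
  public static void main(String[] args) {
    // Single-column UNIQUE constraint; a multi-column constraint repeats the
    // struct with key_seq = 2, 3, ... under the same uk_name.
    SQLUniqueConstraint uk = new SQLUniqueConstraint(
        "default", "web_sales", "ws_order_number",
        1,           // key_seq (1-based)
        "uk_order",  // uk_name; null lets ObjectStore generate a name
        false,       // enable_cstr
        false,       // validate_cstr
        true);       // rely_cstr

    // NOT NULL carries no key_seq: it always covers exactly one column.
    SQLNotNullConstraint nn = new SQLNotNullConstraint(
        "default", "web_sales", "ws_item_sk",
        "nn_item",   // nn_name; null lets ObjectStore generate a name
        false, false, true);

    System.out.println(uk + "\n" + nn);
  }
}
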
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
index b9d0fa2..2711381 100644
--- a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
+++ b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
@@ -318,13 +318,13 @@ module ThriftHiveMetastore
       return
     end
 
-    def create_table_with_constraints(tbl, primaryKeys, foreignKeys)
-      send_create_table_with_constraints(tbl, primaryKeys, foreignKeys)
+    def create_table_with_constraints(tbl, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints)
+      send_create_table_with_constraints(tbl, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints)
       recv_create_table_with_constraints()
     end
 
-    def send_create_table_with_constraints(tbl, primaryKeys, foreignKeys)
-      send_message('create_table_with_constraints', Create_table_with_constraints_args, :tbl => tbl, :primaryKeys => primaryKeys, :foreignKeys => foreignKeys)
+    def send_create_table_with_constraints(tbl, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints)
+      send_message('create_table_with_constraints', Create_table_with_constraints_args, :tbl => tbl, :primaryKeys => primaryKeys, :foreignKeys => foreignKeys, :uniqueConstraints => uniqueConstraints, :notNullConstraints => notNullConstraints)
     end
 
     def recv_create_table_with_constraints()
@@ -384,6 +384,38 @@ module ThriftHiveMetastore
       return
     end
 
+    def add_unique_constraint(req)
+      send_add_unique_constraint(req)
+      recv_add_unique_constraint()
+    end
+
+    def send_add_unique_constraint(req)
+      send_message('add_unique_constraint', Add_unique_constraint_args, :req => req)
+    end
+
+    def recv_add_unique_constraint()
+      result = receive_message(Add_unique_constraint_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      return
+    end
+
+    def add_not_null_constraint(req)
+      send_add_not_null_constraint(req)
+      recv_add_not_null_constraint()
+    end
+
+    def send_add_not_null_constraint(req)
+      send_message('add_not_null_constraint', Add_not_null_constraint_args, :req => req)
+    end
+
+    def recv_add_not_null_constraint()
+      result = receive_message(Add_not_null_constraint_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      return
+    end
+
     def drop_table(dbname, name, deleteData)
       send_drop_table(dbname, name, deleteData)
       recv_drop_table()
@@ -1487,6 +1519,40 @@ module ThriftHiveMetastore
       raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_foreign_keys failed: unknown result')
     end
 
+    def get_unique_constraints(request)
+      send_get_unique_constraints(request)
+      return recv_get_unique_constraints()
+    end
+
+    def send_get_unique_constraints(request)
+      send_message('get_unique_constraints', Get_unique_constraints_args, :request => request)
+    end
+
+    def recv_get_unique_constraints()
+      result = receive_message(Get_unique_constraints_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_unique_constraints failed: unknown result')
+    end
+
+    def get_not_null_constraints(request)
+      send_get_not_null_constraints(request)
+      return recv_get_not_null_constraints()
+    end
+
+    def send_get_not_null_constraints(request)
+      send_message('get_not_null_constraints', Get_not_null_constraints_args, :request => request)
+    end
+
+    def recv_get_not_null_constraints()
+      result = receive_message(Get_not_null_constraints_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_not_null_constraints failed: unknown result')
+    end
+
     def update_table_column_statistics(stats_obj)
       send_update_table_column_statistics(stats_obj)
       return recv_update_table_column_statistics()
@@ -2833,7 +2899,7 @@ module ThriftHiveMetastore
       args = read_args(iprot, Create_table_with_constraints_args)
       result = Create_table_with_constraints_result.new()
       begin
-        @handler.create_table_with_constraints(args.tbl, args.primaryKeys, args.foreignKeys)
+        @handler.create_table_with_constraints(args.tbl, args.primaryKeys, args.foreignKeys, args.uniqueConstraints, args.notNullConstraints)
       rescue ::AlreadyExistsException => o1
         result.o1 = o1
       rescue ::InvalidObjectException => o2
@@ -2885,6 +2951,32 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'add_foreign_key', seqid)
     end
 
+    def process_add_unique_constraint(seqid, iprot, oprot)
+      args = read_args(iprot, Add_unique_constraint_args)
+      result = Add_unique_constraint_result.new()
+      begin
+        @handler.add_unique_constraint(args.req)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'add_unique_constraint', seqid)
+    end
+
+    def process_add_not_null_constraint(seqid, iprot, oprot)
+      args = read_args(iprot, Add_not_null_constraint_args)
+      result = Add_not_null_constraint_result.new()
+      begin
+        @handler.add_not_null_constraint(args.req)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'add_not_null_constraint', seqid)
+    end
+
     def process_drop_table(seqid, iprot, oprot)
       args = read_args(iprot, Drop_table_args)
       result = Drop_table_result.new()
@@ -3750,6 +3842,32 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'get_foreign_keys', seqid)
     end
 
+    def process_get_unique_constraints(seqid, iprot, oprot)
+      args = read_args(iprot, Get_unique_constraints_args)
+      result = Get_unique_constraints_result.new()
+      begin
+        result.success = @handler.get_unique_constraints(args.request)
+      rescue ::MetaException => o1
+        result.o1 = o1
+      rescue ::NoSuchObjectException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'get_unique_constraints', seqid)
+    end
+
+    def process_get_not_null_constraints(seqid, iprot, oprot)
+      args = read_args(iprot, Get_not_null_constraints_args)
+      result = Get_not_null_constraints_result.new()
+      begin
+        result.success = @handler.get_not_null_constraints(args.request)
+      rescue ::MetaException => o1
+        result.o1 = o1
+      rescue ::NoSuchObjectException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'get_not_null_constraints', seqid)
+    end
+
     def process_update_table_column_statistics(seqid, iprot, oprot)
       args = read_args(iprot, Update_table_column_statistics_args)
       result = Update_table_column_statistics_result.new()
@@ -5162,11 +5280,15 @@ module ThriftHiveMetastore
     TBL = 1
     PRIMARYKEYS = 2
     FOREIGNKEYS = 3
+    UNIQUECONSTRAINTS = 4
+    NOTNULLCONSTRAINTS = 5
 
     FIELDS = {
       TBL => {:type => ::Thrift::Types::STRUCT, :name => 'tbl', :class => ::Table},
       PRIMARYKEYS => {:type => ::Thrift::Types::LIST, :name => 'primaryKeys', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLPrimaryKey}},
-      FOREIGNKEYS => {:type => ::Thrift::Types::LIST, :name => 'foreignKeys', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLForeignKey}}
+      FOREIGNKEYS => {:type => ::Thrift::Types::LIST, :name => 'foreignKeys', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLForeignKey}},
+      UNIQUECONSTRAINTS => {:type => ::Thrift::Types::LIST, :name => 'uniqueConstraints', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLUniqueConstraint}},
+      NOTNULLCONSTRAINTS => {:type => ::Thrift::Types::LIST, :name => 'notNullConstraints', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLNotNullConstraint}}
     }
 
     def struct_fields; FIELDS; end
@@ -5301,6 +5423,74 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Add_unique_constraint_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::AddUniqueConstraintRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_unique_constraint_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_not_null_constraint_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::AddNotNullConstraintRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_not_null_constraint_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
   class Drop_table_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
     DBNAME = 1
@@ -7903,6 +8093,78 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Get_unique_constraints_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQUEST = 1
+
+    FIELDS = {
+      REQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'request', :class => ::UniqueConstraintsRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_unique_constraints_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::UniqueConstraintsResponse},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::NoSuchObjectException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_not_null_constraints_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQUEST = 1
+
+    FIELDS = {
+      REQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'request', :class => ::NotNullConstraintsRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_not_null_constraints_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::NotNullConstraintsResponse},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::NoSuchObjectException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
   class Update_table_column_statistics_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
     STATS_OBJ = 1

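A sketch of the new fetch path as seen from Java, assuming an already-connected IMetaStoreClient (the wrapper methods are added to HiveMetaStoreClient further down); db and table names are illustrative. Note that the generated UniqueConstraintsRequest treats both db_name and tbl_name as required, per its validate method above.

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
import org.apache.thrift.TException;

public class GetUniqueConstraintsSketch {
  static void printUniqueConstraints(IMetaStoreClient client) throws TException {
    // Round-trip: UniqueConstraintsRequest in, list of SQLUniqueConstraint out.
    List<SQLUniqueConstraint> uks = client.getUniqueConstraints(
        new UniqueConstraintsRequest("default", "web_sales"));
    for (SQLUniqueConstraint uk : uks) {
      System.out.println(uk.getUk_name() + " -> " + uk.getColumn_name()
          + " (seq " + uk.getKey_seq() + ")");
    }
  }
}
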
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index d9d50ab..52bfb26 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -1367,8 +1367,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
     }
 
     private void create_table_core(final RawStore ms, final Table tbl,
+        final EnvironmentContext envContext)
+            throws AlreadyExistsException, MetaException,
+            InvalidObjectException, NoSuchObjectException {
+      create_table_core(ms, tbl, envContext, null, null, null, null);
+    }
+
+    private void create_table_core(final RawStore ms, final Table tbl,
         final EnvironmentContext envContext, List<SQLPrimaryKey> primaryKeys,
-        List<SQLForeignKey> foreignKeys)
+        List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
+        List<SQLNotNullConstraint> notNullConstraints)
         throws AlreadyExistsException, MetaException,
         InvalidObjectException, NoSuchObjectException {
       if (!MetaStoreUtils.validateName(tbl.getTableName(), hiveConf)) {
@@ -1453,10 +1461,12 @@ public class HiveMetaStore extends ThriftHiveMetastore {
             tbl.getParameters().get(hive_metastoreConstants.DDL_TIME) == null) {
           tbl.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time));
         }
-        if (primaryKeys == null && foreignKeys == null) {
+        if (primaryKeys == null && foreignKeys == null
+                && uniqueConstraints == null && notNullConstraints == null) {
           ms.createTable(tbl);
         } else {
-          ms.createTableWithConstraints(tbl, primaryKeys, foreignKeys);
+          ms.createTableWithConstraints(tbl, primaryKeys, foreignKeys,
+              uniqueConstraints, notNullConstraints);
         }
 
         if (!transactionalListeners.isEmpty()) {
@@ -1500,7 +1510,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       boolean success = false;
       Exception ex = null;
       try {
-        create_table_core(getMS(), tbl, envContext, null, null);
+        create_table_core(getMS(), tbl, envContext);
         success = true;
       } catch (NoSuchObjectException e) {
         ex = e;
@@ -1523,13 +1533,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
 
     @Override
     public void create_table_with_constraints(final Table tbl,
-        final List<SQLPrimaryKey> primaryKeys, final List<SQLForeignKey> foreignKeys)
+        final List<SQLPrimaryKey> primaryKeys, final List<SQLForeignKey> foreignKeys,
+        List<SQLUniqueConstraint> uniqueConstraints,
+        List<SQLNotNullConstraint> notNullConstraints)
         throws AlreadyExistsException, MetaException, InvalidObjectException {
       startFunction("create_table", ": " + tbl.toString());
       boolean success = false;
       Exception ex = null;
       try {
-        create_table_core(getMS(), tbl, null, primaryKeys, foreignKeys);
+        create_table_core(getMS(), tbl, null, primaryKeys, foreignKeys,
+            uniqueConstraints, notNullConstraints);
         success = true;
       } catch (NoSuchObjectException e) {
         ex = e;
@@ -1631,6 +1644,58 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
+    @Override
+    public void add_unique_constraint(AddUniqueConstraintRequest req)
+      throws MetaException, InvalidObjectException {
+      List<SQLUniqueConstraint> uniqueConstraintCols = req.getUniqueConstraintCols();
+      String constraintName = (uniqueConstraintCols != null && uniqueConstraintCols.size() > 0) ?
+              uniqueConstraintCols.get(0).getUk_name() : "null";
+      startFunction("add_unique_constraint", ": " + constraintName);
+      boolean success = false;
+      Exception ex = null;
+      try {
+        getMS().addUniqueConstraints(uniqueConstraintCols);
+        success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("add_unique_constraint", success, ex, constraintName);
+      }
+    }
+
+    @Override
+    public void add_not_null_constraint(AddNotNullConstraintRequest req)
+      throws MetaException, InvalidObjectException {
+      List<SQLNotNullConstraint> notNullConstraintCols = req.getNotNullConstraintCols();
+      String constraintName = (notNullConstraintCols != null && notNullConstraintCols.size() > 0) ?
+              notNullConstraintCols.get(0).getNn_name() : "null";
+      startFunction("add_not_null_constraint", ": " + constraintName);
+      boolean success = false;
+      Exception ex = null;
+      try {
+        getMS().addNotNullConstraints(notNullConstraintCols);
+        success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("add_not_null_constraint", success, ex, constraintName);
+      }
+    }
+
     private boolean is_table_exists(RawStore ms, String dbname, String name)
         throws MetaException {
       return (ms.getTable(dbname, name) != null);
@@ -6972,6 +7037,56 @@ public class HiveMetaStore extends ThriftHiveMetastore {
     }
 
     @Override
+    public UniqueConstraintsResponse get_unique_constraints(UniqueConstraintsRequest request)
+        throws MetaException, NoSuchObjectException, TException {
+      String db_name = request.getDb_name();
+      String tbl_name = request.getTbl_name();
+      startTableFunction("get_unique_constraints", db_name, tbl_name);
+      List<SQLUniqueConstraint> ret = null;
+      Exception ex = null;
+      try {
+        ret = getMS().getUniqueConstraints(db_name, tbl_name);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("get_unique_constraints", ret != null, ex, tbl_name);
+      }
+      return new UniqueConstraintsResponse(ret);
+    }
+
+    @Override
+    public NotNullConstraintsResponse get_not_null_constraints(NotNullConstraintsRequest request)
+        throws MetaException, NoSuchObjectException, TException {
+      String db_name = request.getDb_name();
+      String tbl_name = request.getTbl_name();
+      startTableFunction("get_not_null_constraints", db_name, tbl_name);
+      List<SQLNotNullConstraint> ret = null;
+      Exception ex = null;
+      try {
+        ret = getMS().getNotNullConstraints(db_name, tbl_name);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("get_not_null_constraints", ret != null, ex, tbl_name);
+      }
+      return new NotNullConstraintsResponse(ret);
+    }
+
+    @Override
     public String get_metastore_db_uuid() throws MetaException, TException {
       try {
         return getMS().getMetastoreDbUuid();
@@ -6982,7 +7097,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
     }
   }
 
-
   public static IHMSHandler newRetryingHMSHandler(IHMSHandler baseHandler, HiveConf hiveConf)
       throws MetaException {
     return newRetryingHMSHandler(baseHandler, hiveConf, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3e6add2..0ff4c11 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -752,9 +752,11 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
 
   @Override
   public void createTableWithConstraints(Table tbl,
-    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
-    throws AlreadyExistsException, InvalidObjectException,
-    MetaException, NoSuchObjectException, TException {
+    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+    List<SQLUniqueConstraint> uniqueConstraints,
+    List<SQLNotNullConstraint> notNullConstraints)
+        throws AlreadyExistsException, InvalidObjectException,
+        MetaException, NoSuchObjectException, TException {
     HiveMetaHook hook = getHook(tbl);
     if (hook != null) {
       hook.preCreateTable(tbl);
@@ -762,7 +764,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     boolean success = false;
     try {
       // Subclasses can override this step (for example, for temporary tables)
-      client.create_table_with_constraints(tbl, primaryKeys, foreignKeys);
+      client.create_table_with_constraints(tbl, primaryKeys, foreignKeys,
+          uniqueConstraints, notNullConstraints);
       if (hook != null) {
         hook.commitCreateTable(tbl);
       }
@@ -792,7 +795,19 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     client.add_foreign_key(new AddForeignKeyRequest(foreignKeyCols));
   }
 
-/**
+  @Override
+  public void addUniqueConstraint(List<SQLUniqueConstraint> uniqueConstraintCols) throws
+    NoSuchObjectException, MetaException, TException {
+    client.add_unique_constraint(new AddUniqueConstraintRequest(uniqueConstraintCols));
+  }
+
+  @Override
+  public void addNotNullConstraint(List<SQLNotNullConstraint> notNullConstraintCols) throws
+    NoSuchObjectException, MetaException, TException {
+    client.add_not_null_constraint(new AddNotNullConstraintRequest(notNullConstraintCols));
+  }
+
+  /**
    * @param type
    * @return true or false
    * @throws AlreadyExistsException
@@ -1632,6 +1647,18 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     return client.get_foreign_keys(req).getForeignKeys();
   }
 
+  @Override
+  public List<SQLUniqueConstraint> getUniqueConstraints(UniqueConstraintsRequest req)
+    throws MetaException, NoSuchObjectException, TException {
+    return client.get_unique_constraints(req).getUniqueConstraints();
+  }
+
+  @Override
+  public List<SQLNotNullConstraint> getNotNullConstraints(NotNullConstraintsRequest req)
+    throws MetaException, NoSuchObjectException, TException {
+    return client.get_not_null_constraints(req).getNotNullConstraints();
+  }
+
 
   /** {@inheritDoc} */
   @Override

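A short sketch of attaching a NOT NULL constraint to an existing table through the new addNotNullConstraint wrapper above, which simply builds the AddNotNullConstraintRequest; the client instance and the names are assumed for illustration.

import java.util.Collections;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.thrift.TException;

public class AddNotNullConstraintSketch {
  static void addNotNull(IMetaStoreClient client) throws TException {
    // One struct per constrained column; the metastore generates a constraint
    // name if nn_name is null.
    client.addNotNullConstraint(Collections.singletonList(
        new SQLNotNullConstraint("default", "web_sales", "ws_item_sk",
            "nn_item", false, false, true)));
  }
}
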
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 9c24c23..3663305 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hive.metastore.api.MetadataPpdResult;
 import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
@@ -82,7 +83,9 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -91,6 +94,7 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
 import org.apache.hadoop.hive.metastore.api.TxnOpenException;
+import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
@@ -1651,9 +1655,17 @@ public interface IMetaStoreClient {
   List<SQLForeignKey> getForeignKeys(ForeignKeysRequest request) throws MetaException,
     NoSuchObjectException, TException;
 
+  List<SQLUniqueConstraint> getUniqueConstraints(UniqueConstraintsRequest request) throws MetaException,
+    NoSuchObjectException, TException;
+
+  List<SQLNotNullConstraint> getNotNullConstraints(NotNullConstraintsRequest request) throws MetaException,
+    NoSuchObjectException, TException;
+
   void createTableWithConstraints(
     org.apache.hadoop.hive.metastore.api.Table tTbl,
-    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
+    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+    List<SQLUniqueConstraint> uniqueConstraints,
+    List<SQLNotNullConstraint> notNullConstraints)
     throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException;
 
   void dropConstraint(String dbName, String tableName, String constraintName) throws
@@ -1665,6 +1677,12 @@ public interface IMetaStoreClient {
   void addForeignKey(List<SQLForeignKey> foreignKeyCols) throws
   MetaException, NoSuchObjectException, TException;
 
+  void addUniqueConstraint(List<SQLUniqueConstraint> uniqueConstraintCols) throws
+  MetaException, NoSuchObjectException, TException;
+
+  void addNotNullConstraint(List<SQLNotNullConstraint> notNullConstraintCols) throws
+  MetaException, NoSuchObjectException, TException;
+
   /**
    * Gets the unique id of the backing database instance used for storing metadata
    * @return unique id of the backing database instance
@@ -1672,4 +1690,5 @@ public interface IMetaStoreClient {
    * @throws TException in case of Thrift errors
    */
   String getMetastoreDbUuid() throws MetaException, TException;
+
 }

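A sketch of the extended createTableWithConstraints contract defined above; per the create_table_core change in HiveMetaStore, any of the four constraint lists may be null, and when all are null the server falls back to the plain createTable path. The client and arguments are assumed to be built elsewhere.

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;

public class CreateWithConstraintsSketch {
  static void create(IMetaStoreClient client, Table tbl,
      List<SQLPrimaryKey> pks, List<SQLForeignKey> fks,
      List<SQLUniqueConstraint> uks, List<SQLNotNullConstraint> nns)
      throws TException {
    // Creates the table and persists all constraints in one transaction on
    // the server side (see ObjectStore.createTableWithConstraints below).
    client.createTableWithConstraints(tbl, pks, fks, uks, nns);
  }
}
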
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index df73693..500fba9 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -56,7 +56,9 @@ import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -1918,9 +1920,6 @@ class MetaStoreDirectSql {
       + (parent_db_name == null ? "" : " \"D2\".\"NAME\" = ?") ;
 
     queryText = queryText.trim();
-    if (queryText.endsWith("WHERE")) {
-      queryText = queryText.substring(0, queryText.length()-5);
-    }
     if (queryText.endsWith("AND")) {
       queryText = queryText.substring(0, queryText.length()-3);
     }
@@ -1986,9 +1985,6 @@ class MetaStoreDirectSql {
       + (tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? ") ;
 
     queryText = queryText.trim();
-    if (queryText.endsWith("WHERE")) {
-      queryText = queryText.substring(0, queryText.length()-5);
-    }
     if (queryText.endsWith("AND")) {
       queryText = queryText.substring(0, queryText.length()-3);
     }
@@ -2023,4 +2019,107 @@ class MetaStoreDirectSql {
     }
     return ret;
   }
+
+  public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    List<SQLUniqueConstraint> ret = new ArrayList<SQLUniqueConstraint>();
+    String queryText =
+      "SELECT \"DBS\".\"NAME\", \"TBLS\".\"TBL_NAME\", \"COLUMNS_V2\".\"COLUMN_NAME\","
+      + "\"KEY_CONSTRAINTS\".\"POSITION\", "
+      + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\" "
+      + " FROM  \"TBLS\" "
+      + " INNER  JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" "
+      + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
+      + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND "
+      + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" "
+      + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = "+ MConstraint.UNIQUE_CONSTRAINT + " AND "
+      + (db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND")
+      + (tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? ") ;
+
+    queryText = queryText.trim();
+    if (queryText.endsWith("AND")) {
+      queryText = queryText.substring(0, queryText.length()-3);
+    }
+    List<String> pms = new ArrayList<String>();
+    if (db_name != null) {
+      pms.add(db_name);
+    }
+    if (tbl_name != null) {
+      pms.add(tbl_name);
+    }
+
+    Query queryParams = pm.newQuery("javax.jdo.query.SQL", queryText);
+    List<Object[]> sqlResult = ensureList(executeWithArray(
+        queryParams, pms.toArray(), queryText));
+
+    if (!sqlResult.isEmpty()) {
+      for (Object[] line : sqlResult) {
+        int enableValidateRely = extractSqlInt(line[5]);
+        boolean enable = (enableValidateRely & 4) != 0;
+        boolean validate = (enableValidateRely & 2) != 0;
+        boolean rely = (enableValidateRely & 1) != 0;
+        SQLUniqueConstraint currConstraint = new SQLUniqueConstraint(
+          extractSqlString(line[0]),
+          extractSqlString(line[1]),
+          extractSqlString(line[2]),
+          extractSqlInt(line[3]), extractSqlString(line[4]),
+          enable,
+          validate,
+          rely);
+        ret.add(currConstraint);
+      }
+    }
+    return ret;
+  }
+
+  public List<SQLNotNullConstraint> getNotNullConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    List<SQLNotNullConstraint> ret = new ArrayList<SQLNotNullConstraint>();
+    String queryText =
+      "SELECT \"DBS\".\"NAME\", \"TBLS\".\"TBL_NAME\", \"COLUMNS_V2\".\"COLUMN_NAME\","
+      + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\" "
+      + " FROM  \"TBLS\" "
+      + " INNER  JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" "
+      + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
+      + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND "
+      + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" "
+      + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = "+ MConstraint.NOT_NULL_CONSTRAINT + " AND "
+      + (db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND")
+      + (tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? ") ;
+
+    queryText = queryText.trim();
+    if (queryText.endsWith("AND")) {
+      queryText = queryText.substring(0, queryText.length()-3);
+    }
+    List<String> pms = new ArrayList<String>();
+    if (db_name != null) {
+      pms.add(db_name);
+    }
+    if (tbl_name != null) {
+      pms.add(tbl_name);
+    }
+
+    Query queryParams = pm.newQuery("javax.jdo.query.SQL", queryText);
+    List<Object[]> sqlResult = ensureList(executeWithArray(
+        queryParams, pms.toArray(), queryText));
+
+    if (!sqlResult.isEmpty()) {
+      for (Object[] line : sqlResult) {
+        int enableValidateRely = extractSqlInt(line[4]);
+        boolean enable = (enableValidateRely & 4) != 0;
+        boolean validate = (enableValidateRely & 2) != 0;
+        boolean rely = (enableValidateRely & 1) != 0;
+        SQLNotNullConstraint currConstraint = new SQLNotNullConstraint(
+          extractSqlString(line[0]),
+          extractSqlString(line[1]),
+          extractSqlString(line[2]),
+          extractSqlString(line[3]),
+          enable,
+          validate,
+          rely);
+        ret.add(currConstraint);
+      }
+    }
+    return ret;
+  }
 }

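For reference, the ENABLE_VALIDATE_RELY column packs the three constraint traits into a single int. A small sketch of the encoding used by ObjectStore below and the decoding used in the direct-SQL reads above (bit 2 = ENABLE, bit 1 = VALIDATE, bit 0 = RELY):

public class ConstraintTraitsSketch {
  // Mirrors: (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0)
  static int encode(boolean enable, boolean validate, boolean rely) {
    return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
  }

  // Mirrors the masks applied when reading ENABLE_VALIDATE_RELY back.
  static boolean[] decode(int enableValidateRely) {
    return new boolean[] {
        (enableValidateRely & 4) != 0,   // enable
        (enableValidateRely & 2) != 0,   // validate
        (enableValidateRely & 1) != 0 }; // rely
  }
}
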
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 28b1e57..b16218d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -60,6 +60,7 @@ import javax.jdo.datastore.DataStoreCache;
 import javax.jdo.identity.IntIdentity;
 
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -108,7 +109,9 @@ import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -970,17 +973,21 @@ public class ObjectStore implements RawStore, Configurable {
 
   @Override
   public void createTableWithConstraints(Table tbl,
-    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
+    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+    List<SQLUniqueConstraint> uniqueConstraints,
+    List<SQLNotNullConstraint> notNullConstraints)
     throws InvalidObjectException, MetaException {
     boolean success = false;
     try {
       openTransaction();
       createTable(tbl);
-      // Add primary keys and foreign keys.
-      // We need not do a deep retrieval of the Table Column Descriptor while persisting the PK/FK
-      // since this transaction involving create table is not yet committed.
+      // Add constraints.
+      // We need not do a deep retrieval of the Table Column Descriptor while persisting the
+      // constraints since this transaction involving create table is not yet committed.
       addPrimaryKeys(primaryKeys, false);
       addForeignKeys(foreignKeys, false);
+      addUniqueConstraints(uniqueConstraints, false);
+      addNotNullConstraints(notNullConstraints, false);
       success = commitTransaction();
     } finally {
       if (!success) {
@@ -3588,32 +3595,39 @@ public class ObjectStore implements RawStore, Configurable {
     String currentConstraintName = null;
 
     for (int i = 0; i < fks.size(); i++) {
-      AttachedMTableInfo nParentTable = getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name(), retrieveCD);
+      final String pkTableDB = HiveStringUtils.normalizeIdentifier(fks.get(i).getPktable_db());
+      final String pkTableName = HiveStringUtils.normalizeIdentifier(fks.get(i).getPktable_name());
+      final String pkColumnName = HiveStringUtils.normalizeIdentifier(fks.get(i).getPkcolumn_name());
+      final String fkTableDB = HiveStringUtils.normalizeIdentifier(fks.get(i).getFktable_db());
+      final String fkTableName = HiveStringUtils.normalizeIdentifier(fks.get(i).getFktable_name());
+      final String fkColumnName = HiveStringUtils.normalizeIdentifier(fks.get(i).getFkcolumn_name());
+
+      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
+      // For instance, this is the case when we are creating the table.
+      AttachedMTableInfo nParentTable = getMTable(pkTableDB, pkTableName, retrieveCD);
       MTable parentTable = nParentTable.mtbl;
       if (parentTable == null) {
-        throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name());
+        throw new InvalidObjectException("Parent table not found: " + pkTableName);
       }
 
-      AttachedMTableInfo nChildTable = getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(), retrieveCD);
+      AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
       MTable childTable = nChildTable.mtbl;
       if (childTable == null) {
-        throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name());
+        throw new InvalidObjectException("Child table not found: " + fkTableName);
       }
 
       MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
       List<MFieldSchema> parentCols = parentCD == null ? null : parentCD.getCols();
-      int parentIntegerIndex =
-        getColumnIndexFromTableColumns(parentCols, fks.get(i).getPkcolumn_name());
+      int parentIntegerIndex = getColumnIndexFromTableColumns(parentCols, pkColumnName);
       if (parentIntegerIndex == -1) {
-        throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name());
+        throw new InvalidObjectException("Parent column not found: " + pkColumnName);
       }
 
       MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
       List<MFieldSchema> childCols = childCD.getCols();
-      int childIntegerIndex =
-        getColumnIndexFromTableColumns(childCols, fks.get(i).getFkcolumn_name());
+      int childIntegerIndex = getColumnIndexFromTableColumns(childCols, fkColumnName);
       if (childIntegerIndex == -1) {
-        throw new InvalidObjectException("Child column not found: " + fks.get(i).getFkcolumn_name());
+        throw new InvalidObjectException("Child column not found: " + fkColumnName);
       }
 
       if (fks.get(i).getFk_name() == null) {
@@ -3625,12 +3639,11 @@ public class ObjectStore implements RawStore, Configurable {
         // However, this scenario can be ignored for practical purposes because of
         // the uniqueness of the generated constraint name.
         if (fks.get(i).getKey_seq() == 1) {
-          currentConstraintName = generateConstraintName(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(),
-            fks.get(i).getPktable_db(), fks.get(i).getPktable_name(),
-            fks.get(i).getPkcolumn_name(), fks.get(i).getFkcolumn_name(), "fk");
+          currentConstraintName = generateConstraintName(
+            fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
         }
       } else {
-        currentConstraintName = fks.get(i).getFk_name();
+        currentConstraintName = HiveStringUtils.normalizeIdentifier(fks.get(i).getFk_name());
       }
       Integer updateRule = fks.get(i).getUpdate_rule();
       Integer deleteRule = fks.get(i).getDelete_rule();
@@ -3667,19 +3680,24 @@ public class ObjectStore implements RawStore, Configurable {
     String constraintName = null;
 
     for (int i = 0; i < pks.size(); i++) {
-      AttachedMTableInfo nParentTable =
-        getMTable(pks.get(i).getTable_db(), pks.get(i).getTable_name(), retrieveCD);
+      final String tableDB = HiveStringUtils.normalizeIdentifier(pks.get(i).getTable_db());
+      final String tableName = HiveStringUtils.normalizeIdentifier(pks.get(i).getTable_name());
+      final String columnName = HiveStringUtils.normalizeIdentifier(pks.get(i).getColumn_name());
+
+      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
+      // For instance, this is the case when we are creating the table.
+      AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
       MTable parentTable = nParentTable.mtbl;
       if (parentTable == null) {
-        throw new InvalidObjectException("Parent table not found: " + pks.get(i).getTable_name());
+        throw new InvalidObjectException("Parent table not found: " + tableName);
       }
 
       MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
       int parentIntegerIndex =
-        getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), pks.get(i).getColumn_name());
+        getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
 
       if (parentIntegerIndex == -1) {
-        throw new InvalidObjectException("Parent column not found: " + pks.get(i).getColumn_name());
+        throw new InvalidObjectException("Parent column not found: " + columnName);
       }
       if (getPrimaryKeyConstraintName(
           parentTable.getDatabase().getName(), parentTable.getTableName()) != null) {
@@ -3688,11 +3706,10 @@ public class ObjectStore implements RawStore, Configurable {
       }
       if (pks.get(i).getPk_name() == null) {
         if (pks.get(i).getKey_seq() == 1) {
-          constraintName = generateConstraintName(pks.get(i).getTable_db(), pks.get(i).getTable_name(),
-            pks.get(i).getColumn_name(), "pk");
+          constraintName = generateConstraintName(tableDB, tableName, columnName, "pk");
         }
       } else {
-        constraintName = pks.get(i).getPk_name();
+        constraintName = HiveStringUtils.normalizeIdentifier(pks.get(i).getPk_name());
       }
 
       int enableValidateRely = (pks.get(i).isEnable_cstr() ? 4 : 0) +
@@ -3716,6 +3733,120 @@ public class ObjectStore implements RawStore, Configurable {
   }
 
   @Override
+  public void addUniqueConstraints(List<SQLUniqueConstraint> uks)
+          throws InvalidObjectException, MetaException {
+    addUniqueConstraints(uks, true);
+  }
+
+  private void addUniqueConstraints(List<SQLUniqueConstraint> uks, boolean retrieveCD)
+          throws InvalidObjectException, MetaException {
+    List<MConstraint> cstrs = new ArrayList<MConstraint>();
+    String constraintName = null;
+
+    for (int i = 0; i < uks.size(); i++) {
+      final String tableDB = HiveStringUtils.normalizeIdentifier(uks.get(i).getTable_db());
+      final String tableName = HiveStringUtils.normalizeIdentifier(uks.get(i).getTable_name());
+      final String columnName = HiveStringUtils.normalizeIdentifier(uks.get(i).getColumn_name());
+
+      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
+      // For instance, this is the case when we are creating the table.
+      AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
+      MTable parentTable = nParentTable.mtbl;
+      if (parentTable == null) {
+        throw new InvalidObjectException("Parent table not found: " + tableName);
+      }
+
+      MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
+      int parentIntegerIndex =
+          getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
+      if (parentIntegerIndex == -1) {
+        throw new InvalidObjectException("Parent column not found: " + columnName);
+      }
+      if (uks.get(i).getUk_name() == null) {
+        if (uks.get(i).getKey_seq() == 1) {
+          constraintName = generateConstraintName(tableDB, tableName, columnName, "uk");
+        }
+      } else {
+        constraintName = HiveStringUtils.normalizeIdentifier(uks.get(i).getUk_name());
+      }
+
+      int enableValidateRely = (uks.get(i).isEnable_cstr() ? 4 : 0) +
+          (uks.get(i).isValidate_cstr() ? 2 : 0) + (uks.get(i).isRely_cstr() ? 1 : 0);
+      MConstraint muk = new MConstraint(
+        constraintName,
+        MConstraint.UNIQUE_CONSTRAINT,
+        uks.get(i).getKey_seq(),
+        null,
+        null,
+        enableValidateRely,
+        parentTable,
+        null,
+        parentCD,
+        null,
+        null,
+        parentIntegerIndex);
+      cstrs.add(muk);
+    }
+    pm.makePersistentAll(cstrs);
+  }
+
+  @Override
+  public void addNotNullConstraints(List<SQLNotNullConstraint> nns)
+          throws InvalidObjectException, MetaException {
+    addNotNullConstraints(nns, true);
+  }
+
+  private void addNotNullConstraints(List<SQLNotNullConstraint> nns, boolean retrieveCD)
+          throws InvalidObjectException, MetaException {
+    List<MConstraint> cstrs = new ArrayList<MConstraint>();
+    String constraintName = null;
+
+    for (int i = 0; i < nns.size(); i++) {
+      final String tableDB = HiveStringUtils.normalizeIdentifier(nns.get(i).getTable_db());
+      final String tableName = HiveStringUtils.normalizeIdentifier(nns.get(i).getTable_name());
+      final String columnName = HiveStringUtils.normalizeIdentifier(nns.get(i).getColumn_name());
+
+      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
+      // For instance, this is the case when we are creating the table.
+      AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
+      MTable parentTable = nParentTable.mtbl;
+      if (parentTable == null) {
+        throw new InvalidObjectException("Parent table not found: " + tableName);
+      }
+
+      MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
+      int parentIntegerIndex =
+          getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
+      if (parentIntegerIndex == -1) {
+        throw new InvalidObjectException("Parent column not found: " + columnName);
+      }
+      if (nns.get(i).getNn_name() == null) {
+        constraintName = generateConstraintName(tableDB, tableName, columnName, "nn");
+      } else {
+        constraintName = HiveStringUtils.normalizeIdentifier(nns.get(i).getNn_name());
+      }
+
+      int enableValidateRely = (nns.get(i).isEnable_cstr() ? 4 : 0) +
+          (nns.get(i).isValidate_cstr() ? 2 : 0) + (nns.get(i).isRely_cstr() ? 1 : 0);
+      MConstraint muk = new MConstraint(
+        constraintName,
+        MConstraint.NOT_NULL_CONSTRAINT,
+        1, // Not null constraint should reference a single column
+        null,
+        null,
+        enableValidateRely,
+        parentTable,
+        null,
+        parentCD,
+        null,
+        null,
+        parentIntegerIndex);
+      cstrs.add(muk);
+    }
+    pm.makePersistentAll(cstrs);
+  }
+
+  @Override
   public boolean addIndex(Index index) throws InvalidObjectException,
       MetaException {
     boolean commited = false;
@@ -8308,7 +8439,7 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       return getPrimaryKeysInternal(db_name, tbl_name, true, true);
     } catch (NoSuchObjectException e) {
-      throw new MetaException(e.getMessage());
+      throw new MetaException(ExceptionUtils.getStackTrace(e));
     }
   }
 
@@ -8397,7 +8528,7 @@ public class ObjectStore implements RawStore, Configurable {
       return getForeignKeysInternal(parent_db_name,
         parent_tbl_name, foreign_db_name, foreign_tbl_name, true, true);
     } catch (NoSuchObjectException e) {
-      throw new MetaException(e.getMessage());
+      throw new MetaException(ExceptionUtils.getStackTrace(e));
     }
   }
 
@@ -8514,6 +8645,143 @@ public class ObjectStore implements RawStore, Configurable {
   }
 
   @Override
+  public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    try {
+      return getUniqueConstraintsInternal(db_name, tbl_name, true, true);
+    } catch (NoSuchObjectException e) {
+      throw new MetaException(ExceptionUtils.getStackTrace(e));
+    }
+  }
+
+  protected List<SQLUniqueConstraint> getUniqueConstraintsInternal(final String db_name_input,
+      final String tbl_name_input, boolean allowSql, boolean allowJdo)
+          throws MetaException, NoSuchObjectException {
+    final String db_name = HiveStringUtils.normalizeIdentifier(db_name_input);
+    final String tbl_name = HiveStringUtils.normalizeIdentifier(tbl_name_input);
+    return new GetListHelper<SQLUniqueConstraint>(db_name, tbl_name, allowSql, allowJdo) {
+
+      @Override
+      protected List<SQLUniqueConstraint> getSqlResult(GetHelper<List<SQLUniqueConstraint>> ctx)
+              throws MetaException {
+        return directSql.getUniqueConstraints(db_name, tbl_name);
+      }
+
+      @Override
+      protected List<SQLUniqueConstraint> getJdoResult(GetHelper<List<SQLUniqueConstraint>> ctx)
+              throws MetaException, NoSuchObjectException {
+        return getUniqueConstraintsViaJdo(db_name, tbl_name);
+      }
+    }.run(false);
+  }
+
+  private List<SQLUniqueConstraint> getUniqueConstraintsViaJdo(String db_name, String tbl_name)
+          throws MetaException {
+    boolean commited = false;
+    List<SQLUniqueConstraint> uniqueConstraints = null;
+    Query query = null;
+    try {
+      openTransaction();
+      query = pm.newQuery(MConstraint.class,
+        "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
+        + " constraintType == MConstraint.UNIQUE_CONSTRAINT");
+      query.declareParameters("java.lang.String tbl_name, java.lang.String db_name");
+      Collection<?> constraints = (Collection<?>) query.execute(tbl_name, db_name);
+      pm.retrieveAll(constraints);
+      uniqueConstraints = new ArrayList<SQLUniqueConstraint>();
+      for (Iterator<?> i = constraints.iterator(); i.hasNext();) {
+        MConstraint currUK = (MConstraint) i.next();
+        int enableValidateRely = currUK.getEnableValidateRely();
+        boolean enable = (enableValidateRely & 4) != 0;
+        boolean validate = (enableValidateRely & 2) != 0;
+        boolean rely = (enableValidateRely & 1) != 0;
+        uniqueConstraints.add(new SQLUniqueConstraint(db_name,
+         tbl_name,
+         currUK.getParentColumn().getCols().get(currUK.getParentIntegerIndex()).getName(),
+         currUK.getPosition(),
+         currUK.getConstraintName(), enable, validate, rely));
+      }
+      commited = commitTransaction();
+    } finally {
+      if (!commited) {
+        rollbackTransaction();
+      }
+      if (query != null) {
+        query.closeAll();
+      }
+    }
+    return uniqueConstraints;
+  }
+
+  @Override
+  public List<SQLNotNullConstraint> getNotNullConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    try {
+      return getNotNullConstraintsInternal(db_name, tbl_name, true, true);
+    } catch (NoSuchObjectException e) {
+      throw new MetaException(ExceptionUtils.getStackTrace(e));
+    }
+  }
+
+  protected List<SQLNotNullConstraint> getNotNullConstraintsInternal(final String db_name_input,
+      final String tbl_name_input, boolean allowSql, boolean allowJdo)
+          throws MetaException, NoSuchObjectException {
+    final String db_name = HiveStringUtils.normalizeIdentifier(db_name_input);
+    final String tbl_name = HiveStringUtils.normalizeIdentifier(tbl_name_input);
+    return new GetListHelper<SQLNotNullConstraint>(db_name, tbl_name, allowSql, allowJdo) {
+
+      @Override
+      protected List<SQLNotNullConstraint> getSqlResult(GetHelper<List<SQLNotNullConstraint>> ctx)
+              throws MetaException {
+        return directSql.getNotNullConstraints(db_name, tbl_name);
+      }
+
+      @Override
+      protected List<SQLNotNullConstraint> getJdoResult(GetHelper<List<SQLNotNullConstraint>> ctx)
+              throws MetaException, NoSuchObjectException {
+        return getNotNullConstraintsViaJdo(db_name, tbl_name);
+      }
+    }.run(false);
+  }
+
+  private List<SQLNotNullConstraint> getNotNullConstraintsViaJdo(String db_name, String tbl_name)
+          throws MetaException {
+    boolean commited = false;
+    List<SQLNotNullConstraint> notNullConstraints = null;
+    Query query = null;
+    try {
+      openTransaction();
+      query = pm.newQuery(MConstraint.class,
+        "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
+        + " constraintType == MConstraint.NOT_NULL_CONSTRAINT");
+      query.declareParameters("java.lang.String tbl_name, java.lang.String db_name");
+      Collection<?> constraints = (Collection<?>) query.execute(tbl_name, db_name);
+      pm.retrieveAll(constraints);
+      notNullConstraints = new ArrayList<SQLNotNullConstraint>();
+      for (Iterator<?> i = constraints.iterator(); i.hasNext();) {
+        MConstraint currNN = (MConstraint) i.next();
+        int enableValidateRely = currNN.getEnableValidateRely();
+        boolean enable = (enableValidateRely & 4) != 0;
+        boolean validate = (enableValidateRely & 2) != 0;
+        boolean rely = (enableValidateRely & 1) != 0;
+        notNullConstraints.add(new SQLNotNullConstraint(db_name,
+         tbl_name,
+         currNN.getParentColumn().getCols().get(currNN.getParentIntegerIndex()).getName(),
+         currNN.getConstraintName(), enable, validate, rely));
+      }
+      commited = commitTransaction();
+    } finally {
+      if (!commited) {
+        rollbackTransaction();
+      }
+      if (query != null) {
+        query.closeAll();
+      }
+    }
+    return notNullConstraints;
+  }
+
+  @Override
   public void dropConstraint(String dbName, String tableName,
     String constraintName) throws NoSuchObjectException {
     boolean success = false;

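Note on the ObjectStore changes above: the ENABLE/VALIDATE/RELY constraint characteristics are packed into a single int (bit values 4, 2 and 1) before persisting an MConstraint, and unpacked with the matching bit masks when reading constraints back. A minimal, self-contained sketch of that encoding; the class and method names here are illustrative, only the bit values come from the patch:

    import java.util.Arrays;

    // Sketch of the ENABLE/VALIDATE/RELY bit packing used in ObjectStore.
    public class ConstraintTraits {
      static final int ENABLE = 4, VALIDATE = 2, RELY = 1;

      static int encode(boolean enable, boolean validate, boolean rely) {
        return (enable ? ENABLE : 0) + (validate ? VALIDATE : 0) + (rely ? RELY : 0);
      }

      static boolean[] decode(int enableValidateRely) {
        return new boolean[] {
            (enableValidateRely & ENABLE) != 0,   // enable
            (enableValidateRely & VALIDATE) != 0, // validate
            (enableValidateRely & RELY) != 0      // rely
        };
      }

      public static void main(String[] args) {
        int bits = encode(false, false, true);             // RELY only
        System.out.println(Arrays.toString(decode(bits))); // [false, false, true]
      }
    }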
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 964ffb2..67506f2 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -54,7 +54,9 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.Type;
@@ -702,8 +704,15 @@ public interface RawStore extends Configurable {
     String parent_tbl_name, String foreign_db_name, String foreign_tbl_name)
     throws MetaException;
 
+  public abstract List<SQLUniqueConstraint> getUniqueConstraints(String db_name,
+    String tbl_name) throws MetaException;
+
+  public abstract List<SQLNotNullConstraint> getNotNullConstraints(String db_name,
+    String tbl_name) throws MetaException;
+
   void createTableWithConstraints(Table tbl, List<SQLPrimaryKey> primaryKeys,
-    List<SQLForeignKey> foreignKeys) throws InvalidObjectException, MetaException;
+    List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
+    List<SQLNotNullConstraint> notNullConstraints) throws InvalidObjectException, MetaException;
 
   void dropConstraint(String dbName, String tableName, String constraintName) throws NoSuchObjectException;
 
@@ -711,6 +720,10 @@ public interface RawStore extends Configurable {
 
   void addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException;
 
+  void addUniqueConstraints(List<SQLUniqueConstraint> uks) throws InvalidObjectException, MetaException;
+
+  void addNotNullConstraints(List<SQLNotNullConstraint> nns) throws InvalidObjectException, MetaException;
+
   /**
    * Gets the unique id of the backing datastore for the metadata
    * @return

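The RawStore change above extends createTableWithConstraints with two new list parameters, so every implementation and call site must be updated. A hypothetical call-site sketch (database, table and constraint names invented for illustration; the constructor argument orders match the ones used in ObjectStore above), assuming rawStore, tbl, primaryKeys and foreignKeys are already in scope:

    // Invented example values; callers with no unique/not null constraints can
    // pass null, mirroring the null checks in HBaseStore.createTableWithConstraints below.
    List<SQLUniqueConstraint> uniqueConstraints = Arrays.asList(
        new SQLUniqueConstraint("default", "t1", "id", 1, "uk_t1_id", false, false, true));
    List<SQLNotNullConstraint> notNullConstraints = Arrays.asList(
        new SQLNotNullConstraint("default", "t1", "id", "nn_t1_id", false, false, true));
    rawStore.createTableWithConstraints(tbl, primaryKeys, foreignKeys,
        uniqueConstraints, notNullConstraints);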
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index 78aab91..f00f08f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -70,7 +70,9 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
@@ -1846,11 +1848,28 @@ public class CachedStore implements RawStore, Configurable {
   }
 
   @Override
+  public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name)
+      throws MetaException {
+    // TODO constraintCache
+    return rawStore.getUniqueConstraints(db_name, tbl_name);
+  }
+
+  @Override
+  public List<SQLNotNullConstraint> getNotNullConstraints(String db_name, String tbl_name)
+      throws MetaException {
+    // TODO constraintCache
+    return rawStore.getNotNullConstraints(db_name, tbl_name);
+  }
+
+  @Override
   public void createTableWithConstraints(Table tbl,
-      List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
+      List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+      List<SQLUniqueConstraint> uniqueConstraints,
+      List<SQLNotNullConstraint> notNullConstraints)
       throws InvalidObjectException, MetaException {
     // TODO constraintCache
-    rawStore.createTableWithConstraints(tbl, primaryKeys, foreignKeys);
+    rawStore.createTableWithConstraints(tbl, primaryKeys, foreignKeys,
+            uniqueConstraints, notNullConstraints);
     SharedCache.addTableToCache(HiveStringUtils.normalizeIdentifier(tbl.getDbName()),
         HiveStringUtils.normalizeIdentifier(tbl.getTableName()), tbl);
   }
@@ -1877,6 +1896,20 @@ public class CachedStore implements RawStore, Configurable {
   }
 
   @Override
+  public void addUniqueConstraints(List<SQLUniqueConstraint> uks)
+      throws InvalidObjectException, MetaException {
+    // TODO constraintCache
+    rawStore.addUniqueConstraints(uks);
+  }
+
+  @Override
+  public void addNotNullConstraints(List<SQLNotNullConstraint> nns)
+      throws InvalidObjectException, MetaException {
+    // TODO constraintCache
+    rawStore.addNotNullConstraints(nns);
+  }
+
+  @Override
   public Map<String, List<ColumnStatisticsObj>> getColStatsForTablePartitions(String dbName,
       String tableName) throws MetaException, NoSuchObjectException {
     return rawStore.getColStatsForTablePartitions(dbName, tableName);

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index e687a69..d711805 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -24,7 +24,9 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -147,6 +149,8 @@ public class HBaseReadWrite implements MetadataStore {
   private final static byte[] MASTER_KEY_COL = "mk".getBytes(HBaseUtils.ENCODING);
   private final static byte[] PRIMARY_KEY_COL = "pk".getBytes(HBaseUtils.ENCODING);
   private final static byte[] FOREIGN_KEY_COL = "fk".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] UNIQUE_CONSTRAINT_COL = "uk".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] NOT_NULL_CONSTRAINT_COL = "nn".getBytes(HBaseUtils.ENCODING);
   private final static byte[] GLOBAL_PRIVS_KEY = "gp".getBytes(HBaseUtils.ENCODING);
   private final static byte[] SEQUENCES_KEY = "seq".getBytes(HBaseUtils.ENCODING);
   private final static int TABLES_TO_CACHE = 10;
@@ -2550,7 +2554,7 @@ public class HBaseReadWrite implements MetadataStore {
   }
 
   /**********************************************************************************************
-   * Constraints (pk/fk) related methods
+   * Constraints related methods
    *********************************************************************************************/
 
   /**
@@ -2582,6 +2586,34 @@ public class HBaseReadWrite implements MetadataStore {
   }
 
   /**
+   * Fetch the unique constraints for a table
+   * @param dbName database the table is in
+   * @param tableName table name
+   * @return List of unique constraint objects
+   * @throws IOException if there's a read error
+   */
+  List<SQLUniqueConstraint> getUniqueConstraint(String dbName, String tableName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, tableName);
+    byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, UNIQUE_CONSTRAINT_COL);
+    if (serialized == null) return null;
+    return HBaseUtils.deserializeUniqueConstraint(dbName, tableName, serialized);
+  }
+
+  /**
+   * Fetch the not null constraints for a table
+   * @param dbName database the table is in
+   * @param tableName table name
+   * @return List of not null constraint objects
+   * @throws IOException if there's a read error
+   */
+  List<SQLNotNullConstraint> getNotNullConstraint(String dbName, String tableName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, tableName);
+    byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, NOT_NULL_CONSTRAINT_COL);
+    if (serialized == null) return null;
+    return HBaseUtils.deserializeNotNullConstraint(dbName, tableName, serialized);
+  }
+
+  /**
    * Create a primary key on a table.
    * @param pk Primary key for this table
    * @throws IOException if unable to write the data to the store.
@@ -2605,6 +2637,26 @@ public class HBaseReadWrite implements MetadataStore {
   }
 
   /**
+   * Create one or more unique constraints on a table.
+   * @param uks Unique constraints for this table
+   * @throws IOException if unable to write the data to the store.
+   */
+  void putUniqueConstraints(List<SQLUniqueConstraint> uks) throws IOException {
+    byte[][] serialized = HBaseUtils.serializeUniqueConstraints(uks);
+    store(TABLE_TABLE, serialized[0], CATALOG_CF, UNIQUE_CONSTRAINT_COL, serialized[1]);
+  }
+
+  /**
+   * Create one or more not null constraints on a table.
+   * @param nns Not null constraints for this table
+   * @throws IOException if unable to write the data to the store.
+   */
+  void putNotNullConstraints(List<SQLNotNullConstraint> nns) throws IOException {
+    byte[][] serialized = HBaseUtils.serializeNotNullConstraints(nns);
+    store(TABLE_TABLE, serialized[0], CATALOG_CF, NOT_NULL_CONSTRAINT_COL, serialized[1]);
+  }
+
+  /**
    * Drop the primary key from a table.
    * @param dbName database the table is in
    * @param tableName table name
@@ -2629,6 +2681,28 @@ public class HBaseReadWrite implements MetadataStore {
     delete(TABLE_TABLE, key, CATALOG_CF, FOREIGN_KEY_COL);
   }
 
+  /**
+   * Drop the unique constraints from a table.
+   * @param dbName database the table is in
+   * @param tableName table name
+   * @throws IOException if unable to delete from the store
+   */
+  void deleteUniqueConstraint(String dbName, String tableName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, tableName);
+    delete(TABLE_TABLE, key, CATALOG_CF, UNIQUE_CONSTRAINT_COL);
+  }
+
+  /**
+   * Drop the not null constraints from a table.
+   * @param dbName database the table is in
+   * @param tableName table name
+   * @throws IOException if unable to delete from the store
+   */
+  void deleteNotNullConstraint(String dbName, String tableName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, tableName);
+    delete(TABLE_TABLE, key, CATALOG_CF, NOT_NULL_CONSTRAINT_COL);
+  }
+
   /**********************************************************************************************
    * Cache methods
    *********************************************************************************************/

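In the HBase layout above, all unique (or not null) constraints for a table live in a single cell keyed by database and table name: a put serializes the whole list and replaces the previous cell, and a delete removes every constraint of that kind at once. A package-local sketch of that lifecycle (the roundTrip helper is hypothetical; the put/get/delete calls are the ones added above):

    // Hypothetical helper in org.apache.hadoop.hive.metastore.hbase, where the
    // package-private HBaseReadWrite methods are visible.
    static void roundTrip(HBaseReadWrite hrw, List<SQLUniqueConstraint> uks) throws IOException {
      String db = uks.get(0).getTable_db(), tbl = uks.get(0).getTable_name();
      hrw.putUniqueConstraints(uks);       // serializes the whole list into the "uk" cell
      List<SQLUniqueConstraint> back = hrw.getUniqueConstraint(db, tbl);
      assert back != null && back.size() == uks.size();
      hrw.deleteUniqueConstraint(db, tbl); // removes the cell, i.e. all unique constraints
    }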
http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index a7681dd..5a45051 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -22,7 +22,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.CacheLoader;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.ObjectPair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -65,7 +64,9 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.Type;
@@ -2739,15 +2740,56 @@ public class HBaseStore implements RawStore {
   }
 
   @Override
+  public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    db_name = HiveStringUtils.normalizeIdentifier(db_name);
+    tbl_name = HiveStringUtils.normalizeIdentifier(tbl_name);
+    boolean commit = false;
+    openTransaction();
+    try {
+      List<SQLUniqueConstraint> uk = getHBase().getUniqueConstraint(db_name, tbl_name);
+      commit = true;
+      return uk;
+    } catch (IOException e) {
+      LOG.error("Unable to get unique constraint", e);
+      throw new MetaException("Error reading db " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
+
+  @Override
+  public List<SQLNotNullConstraint> getNotNullConstraints(String db_name, String tbl_name)
+          throws MetaException {
+    db_name = HiveStringUtils.normalizeIdentifier(db_name);
+    tbl_name = HiveStringUtils.normalizeIdentifier(tbl_name);
+    boolean commit = false;
+    openTransaction();
+    try {
+      List<SQLNotNullConstraint> nn = getHBase().getNotNullConstraint(db_name, tbl_name);
+      commit = true;
+      return nn;
+    } catch (IOException e) {
+      LOG.error("Unable to get not null constraint", e);
+      throw new MetaException("Error reading db " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
+
+  @Override
   public void createTableWithConstraints(Table tbl, List<SQLPrimaryKey> primaryKeys,
-                                         List<SQLForeignKey> foreignKeys)
-      throws InvalidObjectException, MetaException {
+      List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints,
+      List<SQLNotNullConstraint> notNullConstraints)
+          throws InvalidObjectException, MetaException {
     boolean commit = false;
     openTransaction();
     try {
       createTable(tbl);
       if (primaryKeys != null) addPrimaryKeys(primaryKeys);
       if (foreignKeys != null) addForeignKeys(foreignKeys);
+      if (uniqueConstraints != null) addUniqueConstraints(uniqueConstraints);
+      if (notNullConstraints != null) addNotNullConstraints(notNullConstraints);
       commit = true;
     } finally {
       commitOrRoleBack(commit);
@@ -2787,6 +2829,20 @@ public class HBaseStore implements RawStore {
         return;
       }
 
+      List<SQLUniqueConstraint> uk = getHBase().getUniqueConstraint(dbName, tableName);
+      if (uk != null && uk.size() > 0 && uk.get(0).getUk_name().equals(constraintName)) {
+        getHBase().deleteUniqueConstraint(dbName, tableName);
+        commit = true;
+        return;
+      }
+
+      List<SQLNotNullConstraint> nn = getHBase().getNotNullConstraint(dbName, tableName);
+      if (nn != null && nn.size() > 0 && nn.get(0).getNn_name().equals(constraintName)) {
+        getHBase().deleteNotNullConstraint(dbName, tableName);
+        commit = true;
+        return;
+      }
+
       commit = true;
       throw new NoSuchObjectException("Unable to find constraint named " + constraintName +
         " on table " + tableNameForErrorMsg(dbName, tableName));
@@ -2853,6 +2909,47 @@ public class HBaseStore implements RawStore {
     }
   }
 
+  @Override
+  public void addUniqueConstraints(List<SQLUniqueConstraint> uks) throws InvalidObjectException, MetaException {
+    boolean commit = false;
+    for (SQLUniqueConstraint uk : uks) {
+      uk.setTable_db(HiveStringUtils.normalizeIdentifier(uk.getTable_db()));
+      uk.setTable_name(HiveStringUtils.normalizeIdentifier(uk.getTable_name()));
+      uk.setColumn_name(HiveStringUtils.normalizeIdentifier(uk.getColumn_name()));
+      uk.setUk_name(HiveStringUtils.normalizeIdentifier(uk.getUk_name()));
+    }
+    openTransaction();
+    try {
+      getHBase().putUniqueConstraints(uks);
+      commit = true;
+    } catch (IOException e) {
+      LOG.error("Error writing unique constraints", e);
+      throw new MetaException("Error writing unique constraints: " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
+
+  @Override
+  public void addNotNullConstraints(List<SQLNotNullConstraint> nns) throws InvalidObjectException, MetaException {
+    boolean commit = false;
+    for (SQLNotNullConstraint nn : nns) {
+      nn.setTable_db(HiveStringUtils.normalizeIdentifier(nn.getTable_db()));
+      nn.setTable_name(HiveStringUtils.normalizeIdentifier(nn.getTable_name()));
+      nn.setColumn_name(HiveStringUtils.normalizeIdentifier(nn.getColumn_name()));
+      nn.setNn_name(HiveStringUtils.normalizeIdentifier(nn.getNn_name()));
+    }
+    openTransaction();
+    try {
+      getHBase().putNotNullConstraints(nns);
+      commit = true;
+    } catch (IOException e) {
+      LOG.error("Error writing not null constraints", e);
+      throw new MetaException("Error writing not null constraints: " + e.getMessage());
+    } finally {
+      commitOrRoleBack(commit);
+    }
+  }
+
   @Override
   public Map<String, List<ColumnStatisticsObj>> getColStatsForTablePartitions(String dbName,
       String tableName) throws MetaException, NoSuchObjectException {

http://git-wip-us.apache.org/repos/asf/hive/blob/696be9f5/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
index 64082e8..6b7eb9e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
@@ -37,7 +37,6 @@ import java.util.TreeSet;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
@@ -63,7 +62,9 @@ import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -1586,6 +1587,99 @@ public class HBaseUtils {
     return result;
   }
 
+  /**
+   * Serialize the unique constraint(s) for a table.
+   * @param uks Unique constraint columns.  These may belong to multiple unique constraints.
+   * @return two byte arrays, first contains the key, the second the serialized value.
+   */
+  static byte[][] serializeUniqueConstraints(List<SQLUniqueConstraint> uks) {
+    // First, figure out the dbName and tableName.  We expect this to match for all list entries.
+    byte[][] result = new byte[2][];
+    String dbName = uks.get(0).getTable_db();
+    String tableName = uks.get(0).getTable_name();
+    result[0] = buildKey(HiveStringUtils.normalizeIdentifier(dbName),
+        HiveStringUtils.normalizeIdentifier(tableName));
+
+    HbaseMetastoreProto.UniqueConstraints.Builder builder =
+        HbaseMetastoreProto.UniqueConstraints.newBuilder();
+
+    // Encode any unique constraints we find.  This can be complex because there may be more
+    // than one unique constraint in here, so we need to detect that.
+    Map<String, HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.Builder> ukBuilders = new HashMap<>();
+
+    for (SQLUniqueConstraint ukcol : uks) {
+      HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.Builder ukBuilder =
+          ukBuilders.get(ukcol.getUk_name());
+      if (ukBuilder == null) {
+        // We haven't seen this key before, so add it
+        ukBuilder = HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.newBuilder();
+        ukBuilder.setUkName(ukcol.getUk_name());
+        ukBuilder.setEnableConstraint(ukcol.isEnable_cstr());
+        ukBuilder.setValidateConstraint(ukcol.isValidate_cstr());
+        ukBuilder.setRelyConstraint(ukcol.isRely_cstr());
+        ukBuilders.put(ukcol.getUk_name(), ukBuilder);
+      }
+      assert dbName.equals(ukcol.getTable_db()) : "You switched databases on me!";
+      assert tableName.equals(ukcol.getTable_name()) : "You switched tables on me!";
+      HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.Builder ukColBuilder =
+          HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn.newBuilder();
+      ukColBuilder.setColumnName(ukcol.getColumn_name());
+      ukColBuilder.setKeySeq(ukcol.getKey_seq());
+      ukBuilder.addCols(ukColBuilder);
+    }
+    for (HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.Builder ukBuilder : ukBuilders.values()) {
+      builder.addUks(ukBuilder);
+    }
+    result[1] = builder.build().toByteArray();
+    return result;
+  }
+
+  /**
+   * Serialize the not null constraint(s) for a table.
+   * @param nns Not null constraint columns.  These may belong to multiple constraints.
+   * @return two byte arrays, first contains the key, the second the serialized value.
+   */
+  static byte[][] serializeNotNullConstraints(List<SQLNotNullConstraint> nns) {
+    // First, figure out the dbName and tableName.  We expect this to match for all list entries.
+    byte[][] result = new byte[2][];
+    String dbName = nns.get(0).getTable_db();
+    String tableName = nns.get(0).getTable_name();
+    result[0] = buildKey(HiveStringUtils.normalizeIdentifier(dbName),
+        HiveStringUtils.normalizeIdentifier(tableName));
+
+    HbaseMetastoreProto.NotNullConstraints.Builder builder =
+        HbaseMetastoreProto.NotNullConstraints.newBuilder();
+
+    // Encode any not null constraints we find.  This can be complex because there may be more
+    // than one not null constraint in here, so we need to detect that.
+    Map<String, HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.Builder> nnBuilders = new HashMap<>();
+
+    for (SQLNotNullConstraint nncol : nns) {
+      HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.Builder nnBuilder =
+          nnBuilders.get(nncol.getNn_name());
+      if (nnBuilder == null) {
+        // We haven't seen this key before, so add it
+        nnBuilder = HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.newBuilder();
+        nnBuilder.setNnName(nncol.getNn_name());
+        nnBuilder.setEnableConstraint(nncol.isEnable_cstr());
+        nnBuilder.setValidateConstraint(nncol.isValidate_cstr());
+        nnBuilder.setRelyConstraint(nncol.isRely_cstr());
+        nnBuilders.put(nncol.getNn_name(), nnBuilder);
+      }
+      assert dbName.equals(nncol.getTable_db()) : "You switched databases on me!";
+      assert tableName.equals(nncol.getTable_name()) : "You switched tables on me!";
+      HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.NotNullConstraintColumn.Builder nnColBuilder =
+          HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.NotNullConstraintColumn.newBuilder();
+      nnColBuilder.setColumnName(nncol.getColumn_name());
+      nnBuilder.addCols(nnColBuilder);
+    }
+    for (HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.Builder nnBuilder : nnBuilders.values()) {
+      builder.addNns(nnBuilder);
+    }
+    result[1] = builder.build().toByteArray();
+    return result;
+  }
+
   static List<SQLPrimaryKey> deserializePrimaryKey(String dbName, String tableName, byte[] value)
       throws InvalidProtocolBufferException {
     HbaseMetastoreProto.PrimaryKey proto = HbaseMetastoreProto.PrimaryKey.parseFrom(value);
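Constraints cross this serialization boundary as one row per column: the serializer groups all columns of a constraint into a single protobuf message by constraint name, and the deserializers below fan them back out. A round-trip sketch through the package-local helpers (database, table, column and constraint names invented):

    // A two-column unique constraint arrives as two SQLUniqueConstraint entries
    // sharing uk_name, and comes back the same way after the proto round trip.
    List<SQLUniqueConstraint> uks = Arrays.asList(
        new SQLUniqueConstraint("db1", "t1", "c1", 1, "uk1", false, false, true),
        new SQLUniqueConstraint("db1", "t1", "c2", 2, "uk1", false, false, true));
    byte[][] kv = HBaseUtils.serializeUniqueConstraints(uks); // kv[0] = row key, kv[1] = proto bytes
    List<SQLUniqueConstraint> back = HBaseUtils.deserializeUniqueConstraint("db1", "t1", kv[1]);
    // back again holds two entries for "uk1", one per column.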
@@ -1599,6 +1693,41 @@ public class HBaseUtils {
     return result;
   }
 
+  static List<SQLUniqueConstraint> deserializeUniqueConstraint(String dbName, String tableName, byte[] value)
+      throws InvalidProtocolBufferException {
+    List<SQLUniqueConstraint> result = new ArrayList<>();
+    HbaseMetastoreProto.UniqueConstraints protoConstraints =
+        HbaseMetastoreProto.UniqueConstraints.parseFrom(value);
+
+    for (HbaseMetastoreProto.UniqueConstraints.UniqueConstraint proto : protoConstraints.getUksList()) {
+      for (HbaseMetastoreProto.UniqueConstraints.UniqueConstraint.UniqueConstraintColumn protoUkCol :
+          proto.getColsList()) {
+        result.add(new SQLUniqueConstraint(dbName, tableName, protoUkCol.getColumnName(),
+            protoUkCol.getKeySeq(),
+            proto.getUkName(), proto.getEnableConstraint(),
+            proto.getValidateConstraint(), proto.getRelyConstraint()));
+      }
+    }
+    return result;
+  }
+
+  static List<SQLNotNullConstraint> deserializeNotNullConstraint(String dbName, String tableName, byte[] value)
+      throws InvalidProtocolBufferException {
+    List<SQLNotNullConstraint> result = new ArrayList<>();
+    HbaseMetastoreProto.NotNullConstraints protoConstraints =
+        HbaseMetastoreProto.NotNullConstraints.parseFrom(value);
+
+    for (HbaseMetastoreProto.NotNullConstraints.NotNullConstraint proto : protoConstraints.getNnsList()) {
+      for (HbaseMetastoreProto.NotNullConstraints.NotNullConstraint.NotNullConstraintColumn protoNnCol :
+          proto.getColsList()) {
+        result.add(new SQLNotNullConstraint(dbName, tableName, protoNnCol.getColumnName(),
+            proto.getNnName(), proto.getEnableConstraint(),
+            proto.getValidateConstraint(), proto.getRelyConstraint()));
+      }
+    }
+    return result;
+  }
+
   static List<SQLForeignKey> deserializeForeignKeys(String dbName, String tableName, byte[] value)
       throws InvalidProtocolBufferException {
     List<SQLForeignKey> result = new ArrayList<>();