Posted to commits@hive.apache.org by ha...@apache.org on 2016/05/11 20:31:32 UTC

[02/12] hive git commit: HIVE-13350: Support Alter commands for Rely/NoRely novalidate for PK/FK constraints (Hari Subramaniyan, reviewed by Ashutosh Chauhan)

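For orientation before the generated diffs: the patch adds two new metastore calls, add_primary_key and add_foreign_key, each carrying a list of constraint columns. A minimal Java sketch of the resulting client API follows; the table and constraint names are hypothetical, and the SQLPrimaryKey constructor is assumed to follow the generated Thrift field order (db, table, column, key_seq, name, enable, validate, rely). The three trailing booleans carry the ENABLE/VALIDATE/RELY state that the new ALTER commands manipulate; addForeignKey is used the same way with SQLForeignKey.

    // Sketch only: exercises the IMetaStoreClient methods added in this patch.
    import java.util.Arrays;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

    public class AddConstraintSketch {
      public static void main(String[] args) throws Exception {
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        // Hypothetical table/constraint; DISABLE NOVALIDATE RELY maps to the
        // three trailing booleans (enable=false, validate=false, rely=true).
        SQLPrimaryKey pk = new SQLPrimaryKey("default", "customers", "id",
            1 /* key_seq */, "pk_customers", false, false, true);
        client.addPrimaryKey(Arrays.asList(pk));
        client.close();
      }
    }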
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 06527e3..b47bb59 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -6353,6 +6353,158 @@ class DropConstraintRequest:
   def __ne__(self, other):
     return not (self == other)
 
+class AddPrimaryKeyRequest:
+  """
+  Attributes:
+   - primaryKeyCols
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'primaryKeyCols', (TType.STRUCT,(SQLPrimaryKey, SQLPrimaryKey.thrift_spec)), None, ), # 1
+  )
+
+  def __init__(self, primaryKeyCols=None,):
+    self.primaryKeyCols = primaryKeyCols
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.primaryKeyCols = []
+          (_etype302, _size299) = iprot.readListBegin()
+          for _i303 in xrange(_size299):
+            _elem304 = SQLPrimaryKey()
+            _elem304.read(iprot)
+            self.primaryKeyCols.append(_elem304)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AddPrimaryKeyRequest')
+    if self.primaryKeyCols is not None:
+      oprot.writeFieldBegin('primaryKeyCols', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRUCT, len(self.primaryKeyCols))
+      for iter305 in self.primaryKeyCols:
+        iter305.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.primaryKeyCols is None:
+      raise TProtocol.TProtocolException(message='Required field primaryKeyCols is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.primaryKeyCols)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AddForeignKeyRequest:
+  """
+  Attributes:
+   - foreignKeyCols
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'foreignKeyCols', (TType.STRUCT,(SQLForeignKey, SQLForeignKey.thrift_spec)), None, ), # 1
+  )
+
+  def __init__(self, foreignKeyCols=None,):
+    self.foreignKeyCols = foreignKeyCols
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.foreignKeyCols = []
+          (_etype309, _size306) = iprot.readListBegin()
+          for _i310 in xrange(_size306):
+            _elem311 = SQLForeignKey()
+            _elem311.read(iprot)
+            self.foreignKeyCols.append(_elem311)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AddForeignKeyRequest')
+    if self.foreignKeyCols is not None:
+      oprot.writeFieldBegin('foreignKeyCols', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRUCT, len(self.foreignKeyCols))
+      for iter312 in self.foreignKeyCols:
+        iter312.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.foreignKeyCols is None:
+      raise TProtocol.TProtocolException(message='Required field foreignKeyCols is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.foreignKeyCols)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class PartitionsByExprResult:
   """
   Attributes:
@@ -6382,11 +6534,11 @@ class PartitionsByExprResult:
       if fid == 1:
         if ftype == TType.LIST:
           self.partitions = []
-          (_etype302, _size299) = iprot.readListBegin()
-          for _i303 in xrange(_size299):
-            _elem304 = Partition()
-            _elem304.read(iprot)
-            self.partitions.append(_elem304)
+          (_etype316, _size313) = iprot.readListBegin()
+          for _i317 in xrange(_size313):
+            _elem318 = Partition()
+            _elem318.read(iprot)
+            self.partitions.append(_elem318)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -6408,8 +6560,8 @@ class PartitionsByExprResult:
     if self.partitions is not None:
       oprot.writeFieldBegin('partitions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.partitions))
-      for iter305 in self.partitions:
-        iter305.write(oprot)
+      for iter319 in self.partitions:
+        iter319.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.hasUnknownPartitions is not None:
@@ -6593,11 +6745,11 @@ class TableStatsResult:
       if fid == 1:
         if ftype == TType.LIST:
           self.tableStats = []
-          (_etype309, _size306) = iprot.readListBegin()
-          for _i310 in xrange(_size306):
-            _elem311 = ColumnStatisticsObj()
-            _elem311.read(iprot)
-            self.tableStats.append(_elem311)
+          (_etype323, _size320) = iprot.readListBegin()
+          for _i324 in xrange(_size320):
+            _elem325 = ColumnStatisticsObj()
+            _elem325.read(iprot)
+            self.tableStats.append(_elem325)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -6614,8 +6766,8 @@ class TableStatsResult:
     if self.tableStats is not None:
       oprot.writeFieldBegin('tableStats', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.tableStats))
-      for iter312 in self.tableStats:
-        iter312.write(oprot)
+      for iter326 in self.tableStats:
+        iter326.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -6669,17 +6821,17 @@ class PartitionsStatsResult:
       if fid == 1:
         if ftype == TType.MAP:
           self.partStats = {}
-          (_ktype314, _vtype315, _size313 ) = iprot.readMapBegin()
-          for _i317 in xrange(_size313):
-            _key318 = iprot.readString()
-            _val319 = []
-            (_etype323, _size320) = iprot.readListBegin()
-            for _i324 in xrange(_size320):
-              _elem325 = ColumnStatisticsObj()
-              _elem325.read(iprot)
-              _val319.append(_elem325)
+          (_ktype328, _vtype329, _size327 ) = iprot.readMapBegin()
+          for _i331 in xrange(_size327):
+            _key332 = iprot.readString()
+            _val333 = []
+            (_etype337, _size334) = iprot.readListBegin()
+            for _i338 in xrange(_size334):
+              _elem339 = ColumnStatisticsObj()
+              _elem339.read(iprot)
+              _val333.append(_elem339)
             iprot.readListEnd()
-            self.partStats[_key318] = _val319
+            self.partStats[_key332] = _val333
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
@@ -6696,11 +6848,11 @@ class PartitionsStatsResult:
     if self.partStats is not None:
       oprot.writeFieldBegin('partStats', TType.MAP, 1)
       oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.partStats))
-      for kiter326,viter327 in self.partStats.items():
-        oprot.writeString(kiter326)
-        oprot.writeListBegin(TType.STRUCT, len(viter327))
-        for iter328 in viter327:
-          iter328.write(oprot)
+      for kiter340,viter341 in self.partStats.items():
+        oprot.writeString(kiter340)
+        oprot.writeListBegin(TType.STRUCT, len(viter341))
+        for iter342 in viter341:
+          iter342.write(oprot)
         oprot.writeListEnd()
       oprot.writeMapEnd()
       oprot.writeFieldEnd()
@@ -6771,10 +6923,10 @@ class TableStatsRequest:
       elif fid == 3:
         if ftype == TType.LIST:
           self.colNames = []
-          (_etype332, _size329) = iprot.readListBegin()
-          for _i333 in xrange(_size329):
-            _elem334 = iprot.readString()
-            self.colNames.append(_elem334)
+          (_etype346, _size343) = iprot.readListBegin()
+          for _i347 in xrange(_size343):
+            _elem348 = iprot.readString()
+            self.colNames.append(_elem348)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -6799,8 +6951,8 @@ class TableStatsRequest:
     if self.colNames is not None:
       oprot.writeFieldBegin('colNames', TType.LIST, 3)
       oprot.writeListBegin(TType.STRING, len(self.colNames))
-      for iter335 in self.colNames:
-        oprot.writeString(iter335)
+      for iter349 in self.colNames:
+        oprot.writeString(iter349)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -6879,20 +7031,20 @@ class PartitionsStatsRequest:
       elif fid == 3:
         if ftype == TType.LIST:
           self.colNames = []
-          (_etype339, _size336) = iprot.readListBegin()
-          for _i340 in xrange(_size336):
-            _elem341 = iprot.readString()
-            self.colNames.append(_elem341)
+          (_etype353, _size350) = iprot.readListBegin()
+          for _i354 in xrange(_size350):
+            _elem355 = iprot.readString()
+            self.colNames.append(_elem355)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 4:
         if ftype == TType.LIST:
           self.partNames = []
-          (_etype345, _size342) = iprot.readListBegin()
-          for _i346 in xrange(_size342):
-            _elem347 = iprot.readString()
-            self.partNames.append(_elem347)
+          (_etype359, _size356) = iprot.readListBegin()
+          for _i360 in xrange(_size356):
+            _elem361 = iprot.readString()
+            self.partNames.append(_elem361)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -6917,15 +7069,15 @@ class PartitionsStatsRequest:
     if self.colNames is not None:
       oprot.writeFieldBegin('colNames', TType.LIST, 3)
       oprot.writeListBegin(TType.STRING, len(self.colNames))
-      for iter348 in self.colNames:
-        oprot.writeString(iter348)
+      for iter362 in self.colNames:
+        oprot.writeString(iter362)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.partNames is not None:
       oprot.writeFieldBegin('partNames', TType.LIST, 4)
       oprot.writeListBegin(TType.STRING, len(self.partNames))
-      for iter349 in self.partNames:
-        oprot.writeString(iter349)
+      for iter363 in self.partNames:
+        oprot.writeString(iter363)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -6988,11 +7140,11 @@ class AddPartitionsResult:
       if fid == 1:
         if ftype == TType.LIST:
           self.partitions = []
-          (_etype353, _size350) = iprot.readListBegin()
-          for _i354 in xrange(_size350):
-            _elem355 = Partition()
-            _elem355.read(iprot)
-            self.partitions.append(_elem355)
+          (_etype367, _size364) = iprot.readListBegin()
+          for _i368 in xrange(_size364):
+            _elem369 = Partition()
+            _elem369.read(iprot)
+            self.partitions.append(_elem369)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -7009,8 +7161,8 @@ class AddPartitionsResult:
     if self.partitions is not None:
       oprot.writeFieldBegin('partitions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.partitions))
-      for iter356 in self.partitions:
-        iter356.write(oprot)
+      for iter370 in self.partitions:
+        iter370.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -7084,11 +7236,11 @@ class AddPartitionsRequest:
       elif fid == 3:
         if ftype == TType.LIST:
           self.parts = []
-          (_etype360, _size357) = iprot.readListBegin()
-          for _i361 in xrange(_size357):
-            _elem362 = Partition()
-            _elem362.read(iprot)
-            self.parts.append(_elem362)
+          (_etype374, _size371) = iprot.readListBegin()
+          for _i375 in xrange(_size371):
+            _elem376 = Partition()
+            _elem376.read(iprot)
+            self.parts.append(_elem376)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -7123,8 +7275,8 @@ class AddPartitionsRequest:
     if self.parts is not None:
       oprot.writeFieldBegin('parts', TType.LIST, 3)
       oprot.writeListBegin(TType.STRUCT, len(self.parts))
-      for iter363 in self.parts:
-        iter363.write(oprot)
+      for iter377 in self.parts:
+        iter377.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.ifNotExists is not None:
@@ -7196,11 +7348,11 @@ class DropPartitionsResult:
       if fid == 1:
         if ftype == TType.LIST:
           self.partitions = []
-          (_etype367, _size364) = iprot.readListBegin()
-          for _i368 in xrange(_size364):
-            _elem369 = Partition()
-            _elem369.read(iprot)
-            self.partitions.append(_elem369)
+          (_etype381, _size378) = iprot.readListBegin()
+          for _i382 in xrange(_size378):
+            _elem383 = Partition()
+            _elem383.read(iprot)
+            self.partitions.append(_elem383)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -7217,8 +7369,8 @@ class DropPartitionsResult:
     if self.partitions is not None:
       oprot.writeFieldBegin('partitions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.partitions))
-      for iter370 in self.partitions:
-        iter370.write(oprot)
+      for iter384 in self.partitions:
+        iter384.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -7353,21 +7505,21 @@ class RequestPartsSpec:
       if fid == 1:
         if ftype == TType.LIST:
           self.names = []
-          (_etype374, _size371) = iprot.readListBegin()
-          for _i375 in xrange(_size371):
-            _elem376 = iprot.readString()
-            self.names.append(_elem376)
+          (_etype388, _size385) = iprot.readListBegin()
+          for _i389 in xrange(_size385):
+            _elem390 = iprot.readString()
+            self.names.append(_elem390)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.LIST:
           self.exprs = []
-          (_etype380, _size377) = iprot.readListBegin()
-          for _i381 in xrange(_size377):
-            _elem382 = DropPartitionsExpr()
-            _elem382.read(iprot)
-            self.exprs.append(_elem382)
+          (_etype394, _size391) = iprot.readListBegin()
+          for _i395 in xrange(_size391):
+            _elem396 = DropPartitionsExpr()
+            _elem396.read(iprot)
+            self.exprs.append(_elem396)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -7384,15 +7536,15 @@ class RequestPartsSpec:
     if self.names is not None:
       oprot.writeFieldBegin('names', TType.LIST, 1)
       oprot.writeListBegin(TType.STRING, len(self.names))
-      for iter383 in self.names:
-        oprot.writeString(iter383)
+      for iter397 in self.names:
+        oprot.writeString(iter397)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.exprs is not None:
       oprot.writeFieldBegin('exprs', TType.LIST, 2)
       oprot.writeListBegin(TType.STRUCT, len(self.exprs))
-      for iter384 in self.exprs:
-        iter384.write(oprot)
+      for iter398 in self.exprs:
+        iter398.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -7743,11 +7895,11 @@ class Function:
       elif fid == 8:
         if ftype == TType.LIST:
           self.resourceUris = []
-          (_etype388, _size385) = iprot.readListBegin()
-          for _i389 in xrange(_size385):
-            _elem390 = ResourceUri()
-            _elem390.read(iprot)
-            self.resourceUris.append(_elem390)
+          (_etype402, _size399) = iprot.readListBegin()
+          for _i403 in xrange(_size399):
+            _elem404 = ResourceUri()
+            _elem404.read(iprot)
+            self.resourceUris.append(_elem404)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -7792,8 +7944,8 @@ class Function:
     if self.resourceUris is not None:
       oprot.writeFieldBegin('resourceUris', TType.LIST, 8)
       oprot.writeListBegin(TType.STRUCT, len(self.resourceUris))
-      for iter391 in self.resourceUris:
-        iter391.write(oprot)
+      for iter405 in self.resourceUris:
+        iter405.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -8011,11 +8163,11 @@ class GetOpenTxnsInfoResponse:
       elif fid == 2:
         if ftype == TType.LIST:
           self.open_txns = []
-          (_etype395, _size392) = iprot.readListBegin()
-          for _i396 in xrange(_size392):
-            _elem397 = TxnInfo()
-            _elem397.read(iprot)
-            self.open_txns.append(_elem397)
+          (_etype409, _size406) = iprot.readListBegin()
+          for _i410 in xrange(_size406):
+            _elem411 = TxnInfo()
+            _elem411.read(iprot)
+            self.open_txns.append(_elem411)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -8036,8 +8188,8 @@ class GetOpenTxnsInfoResponse:
     if self.open_txns is not None:
       oprot.writeFieldBegin('open_txns', TType.LIST, 2)
       oprot.writeListBegin(TType.STRUCT, len(self.open_txns))
-      for iter398 in self.open_txns:
-        iter398.write(oprot)
+      for iter412 in self.open_txns:
+        iter412.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -8102,10 +8254,10 @@ class GetOpenTxnsResponse:
       elif fid == 2:
         if ftype == TType.SET:
           self.open_txns = set()
-          (_etype402, _size399) = iprot.readSetBegin()
-          for _i403 in xrange(_size399):
-            _elem404 = iprot.readI64()
-            self.open_txns.add(_elem404)
+          (_etype416, _size413) = iprot.readSetBegin()
+          for _i417 in xrange(_size413):
+            _elem418 = iprot.readI64()
+            self.open_txns.add(_elem418)
           iprot.readSetEnd()
         else:
           iprot.skip(ftype)
@@ -8126,8 +8278,8 @@ class GetOpenTxnsResponse:
     if self.open_txns is not None:
       oprot.writeFieldBegin('open_txns', TType.SET, 2)
       oprot.writeSetBegin(TType.I64, len(self.open_txns))
-      for iter405 in self.open_txns:
-        oprot.writeI64(iter405)
+      for iter419 in self.open_txns:
+        oprot.writeI64(iter419)
       oprot.writeSetEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -8294,10 +8446,10 @@ class OpenTxnsResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.txn_ids = []
-          (_etype409, _size406) = iprot.readListBegin()
-          for _i410 in xrange(_size406):
-            _elem411 = iprot.readI64()
-            self.txn_ids.append(_elem411)
+          (_etype423, _size420) = iprot.readListBegin()
+          for _i424 in xrange(_size420):
+            _elem425 = iprot.readI64()
+            self.txn_ids.append(_elem425)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -8314,8 +8466,8 @@ class OpenTxnsResponse:
     if self.txn_ids is not None:
       oprot.writeFieldBegin('txn_ids', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.txn_ids))
-      for iter412 in self.txn_ids:
-        oprot.writeI64(iter412)
+      for iter426 in self.txn_ids:
+        oprot.writeI64(iter426)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -8436,10 +8588,10 @@ class AbortTxnsRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.txn_ids = []
-          (_etype416, _size413) = iprot.readListBegin()
-          for _i417 in xrange(_size413):
-            _elem418 = iprot.readI64()
-            self.txn_ids.append(_elem418)
+          (_etype430, _size427) = iprot.readListBegin()
+          for _i431 in xrange(_size427):
+            _elem432 = iprot.readI64()
+            self.txn_ids.append(_elem432)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -8456,8 +8608,8 @@ class AbortTxnsRequest:
     if self.txn_ids is not None:
       oprot.writeFieldBegin('txn_ids', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.txn_ids))
-      for iter419 in self.txn_ids:
-        oprot.writeI64(iter419)
+      for iter433 in self.txn_ids:
+        oprot.writeI64(iter433)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -8713,11 +8865,11 @@ class LockRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.component = []
-          (_etype423, _size420) = iprot.readListBegin()
-          for _i424 in xrange(_size420):
-            _elem425 = LockComponent()
-            _elem425.read(iprot)
-            self.component.append(_elem425)
+          (_etype437, _size434) = iprot.readListBegin()
+          for _i438 in xrange(_size434):
+            _elem439 = LockComponent()
+            _elem439.read(iprot)
+            self.component.append(_elem439)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -8754,8 +8906,8 @@ class LockRequest:
     if self.component is not None:
       oprot.writeFieldBegin('component', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.component))
-      for iter426 in self.component:
-        iter426.write(oprot)
+      for iter440 in self.component:
+        iter440.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.txnid is not None:
@@ -9453,11 +9605,11 @@ class ShowLocksResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.locks = []
-          (_etype430, _size427) = iprot.readListBegin()
-          for _i431 in xrange(_size427):
-            _elem432 = ShowLocksResponseElement()
-            _elem432.read(iprot)
-            self.locks.append(_elem432)
+          (_etype444, _size441) = iprot.readListBegin()
+          for _i445 in xrange(_size441):
+            _elem446 = ShowLocksResponseElement()
+            _elem446.read(iprot)
+            self.locks.append(_elem446)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -9474,8 +9626,8 @@ class ShowLocksResponse:
     if self.locks is not None:
       oprot.writeFieldBegin('locks', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.locks))
-      for iter433 in self.locks:
-        iter433.write(oprot)
+      for iter447 in self.locks:
+        iter447.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -9690,20 +9842,20 @@ class HeartbeatTxnRangeResponse:
       if fid == 1:
         if ftype == TType.SET:
           self.aborted = set()
-          (_etype437, _size434) = iprot.readSetBegin()
-          for _i438 in xrange(_size434):
-            _elem439 = iprot.readI64()
-            self.aborted.add(_elem439)
+          (_etype451, _size448) = iprot.readSetBegin()
+          for _i452 in xrange(_size448):
+            _elem453 = iprot.readI64()
+            self.aborted.add(_elem453)
           iprot.readSetEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.SET:
           self.nosuch = set()
-          (_etype443, _size440) = iprot.readSetBegin()
-          for _i444 in xrange(_size440):
-            _elem445 = iprot.readI64()
-            self.nosuch.add(_elem445)
+          (_etype457, _size454) = iprot.readSetBegin()
+          for _i458 in xrange(_size454):
+            _elem459 = iprot.readI64()
+            self.nosuch.add(_elem459)
           iprot.readSetEnd()
         else:
           iprot.skip(ftype)
@@ -9720,15 +9872,15 @@ class HeartbeatTxnRangeResponse:
     if self.aborted is not None:
       oprot.writeFieldBegin('aborted', TType.SET, 1)
       oprot.writeSetBegin(TType.I64, len(self.aborted))
-      for iter446 in self.aborted:
-        oprot.writeI64(iter446)
+      for iter460 in self.aborted:
+        oprot.writeI64(iter460)
       oprot.writeSetEnd()
       oprot.writeFieldEnd()
     if self.nosuch is not None:
       oprot.writeFieldBegin('nosuch', TType.SET, 2)
       oprot.writeSetBegin(TType.I64, len(self.nosuch))
-      for iter447 in self.nosuch:
-        oprot.writeI64(iter447)
+      for iter461 in self.nosuch:
+        oprot.writeI64(iter461)
       oprot.writeSetEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -10170,11 +10322,11 @@ class ShowCompactResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.compacts = []
-          (_etype451, _size448) = iprot.readListBegin()
-          for _i452 in xrange(_size448):
-            _elem453 = ShowCompactResponseElement()
-            _elem453.read(iprot)
-            self.compacts.append(_elem453)
+          (_etype465, _size462) = iprot.readListBegin()
+          for _i466 in xrange(_size462):
+            _elem467 = ShowCompactResponseElement()
+            _elem467.read(iprot)
+            self.compacts.append(_elem467)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -10191,8 +10343,8 @@ class ShowCompactResponse:
     if self.compacts is not None:
       oprot.writeFieldBegin('compacts', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.compacts))
-      for iter454 in self.compacts:
-        iter454.write(oprot)
+      for iter468 in self.compacts:
+        iter468.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -10270,10 +10422,10 @@ class AddDynamicPartitions:
       elif fid == 4:
         if ftype == TType.LIST:
           self.partitionnames = []
-          (_etype458, _size455) = iprot.readListBegin()
-          for _i459 in xrange(_size455):
-            _elem460 = iprot.readString()
-            self.partitionnames.append(_elem460)
+          (_etype472, _size469) = iprot.readListBegin()
+          for _i473 in xrange(_size469):
+            _elem474 = iprot.readString()
+            self.partitionnames.append(_elem474)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -10302,8 +10454,8 @@ class AddDynamicPartitions:
     if self.partitionnames is not None:
       oprot.writeFieldBegin('partitionnames', TType.LIST, 4)
       oprot.writeListBegin(TType.STRING, len(self.partitionnames))
-      for iter461 in self.partitionnames:
-        oprot.writeString(iter461)
+      for iter475 in self.partitionnames:
+        oprot.writeString(iter475)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -10584,11 +10736,11 @@ class NotificationEventResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.events = []
-          (_etype465, _size462) = iprot.readListBegin()
-          for _i466 in xrange(_size462):
-            _elem467 = NotificationEvent()
-            _elem467.read(iprot)
-            self.events.append(_elem467)
+          (_etype479, _size476) = iprot.readListBegin()
+          for _i480 in xrange(_size476):
+            _elem481 = NotificationEvent()
+            _elem481.read(iprot)
+            self.events.append(_elem481)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -10605,8 +10757,8 @@ class NotificationEventResponse:
     if self.events is not None:
       oprot.writeFieldBegin('events', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.events))
-      for iter468 in self.events:
-        iter468.write(oprot)
+      for iter482 in self.events:
+        iter482.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -10727,10 +10879,10 @@ class InsertEventRequestData:
       if fid == 1:
         if ftype == TType.LIST:
           self.filesAdded = []
-          (_etype472, _size469) = iprot.readListBegin()
-          for _i473 in xrange(_size469):
-            _elem474 = iprot.readString()
-            self.filesAdded.append(_elem474)
+          (_etype486, _size483) = iprot.readListBegin()
+          for _i487 in xrange(_size483):
+            _elem488 = iprot.readString()
+            self.filesAdded.append(_elem488)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -10747,8 +10899,8 @@ class InsertEventRequestData:
     if self.filesAdded is not None:
       oprot.writeFieldBegin('filesAdded', TType.LIST, 1)
       oprot.writeListBegin(TType.STRING, len(self.filesAdded))
-      for iter475 in self.filesAdded:
-        oprot.writeString(iter475)
+      for iter489 in self.filesAdded:
+        oprot.writeString(iter489)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -10901,10 +11053,10 @@ class FireEventRequest:
       elif fid == 5:
         if ftype == TType.LIST:
           self.partitionVals = []
-          (_etype479, _size476) = iprot.readListBegin()
-          for _i480 in xrange(_size476):
-            _elem481 = iprot.readString()
-            self.partitionVals.append(_elem481)
+          (_etype493, _size490) = iprot.readListBegin()
+          for _i494 in xrange(_size490):
+            _elem495 = iprot.readString()
+            self.partitionVals.append(_elem495)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -10937,8 +11089,8 @@ class FireEventRequest:
     if self.partitionVals is not None:
       oprot.writeFieldBegin('partitionVals', TType.LIST, 5)
       oprot.writeListBegin(TType.STRING, len(self.partitionVals))
-      for iter482 in self.partitionVals:
-        oprot.writeString(iter482)
+      for iter496 in self.partitionVals:
+        oprot.writeString(iter496)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -11125,12 +11277,12 @@ class GetFileMetadataByExprResult:
       if fid == 1:
         if ftype == TType.MAP:
           self.metadata = {}
-          (_ktype484, _vtype485, _size483 ) = iprot.readMapBegin()
-          for _i487 in xrange(_size483):
-            _key488 = iprot.readI64()
-            _val489 = MetadataPpdResult()
-            _val489.read(iprot)
-            self.metadata[_key488] = _val489
+          (_ktype498, _vtype499, _size497 ) = iprot.readMapBegin()
+          for _i501 in xrange(_size497):
+            _key502 = iprot.readI64()
+            _val503 = MetadataPpdResult()
+            _val503.read(iprot)
+            self.metadata[_key502] = _val503
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
@@ -11152,9 +11304,9 @@ class GetFileMetadataByExprResult:
     if self.metadata is not None:
       oprot.writeFieldBegin('metadata', TType.MAP, 1)
       oprot.writeMapBegin(TType.I64, TType.STRUCT, len(self.metadata))
-      for kiter490,viter491 in self.metadata.items():
-        oprot.writeI64(kiter490)
-        viter491.write(oprot)
+      for kiter504,viter505 in self.metadata.items():
+        oprot.writeI64(kiter504)
+        viter505.write(oprot)
       oprot.writeMapEnd()
       oprot.writeFieldEnd()
     if self.isSupported is not None:
@@ -11224,10 +11376,10 @@ class GetFileMetadataByExprRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.fileIds = []
-          (_etype495, _size492) = iprot.readListBegin()
-          for _i496 in xrange(_size492):
-            _elem497 = iprot.readI64()
-            self.fileIds.append(_elem497)
+          (_etype509, _size506) = iprot.readListBegin()
+          for _i510 in xrange(_size506):
+            _elem511 = iprot.readI64()
+            self.fileIds.append(_elem511)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -11259,8 +11411,8 @@ class GetFileMetadataByExprRequest:
     if self.fileIds is not None:
       oprot.writeFieldBegin('fileIds', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.fileIds))
-      for iter498 in self.fileIds:
-        oprot.writeI64(iter498)
+      for iter512 in self.fileIds:
+        oprot.writeI64(iter512)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.expr is not None:
@@ -11334,11 +11486,11 @@ class GetFileMetadataResult:
       if fid == 1:
         if ftype == TType.MAP:
           self.metadata = {}
-          (_ktype500, _vtype501, _size499 ) = iprot.readMapBegin()
-          for _i503 in xrange(_size499):
-            _key504 = iprot.readI64()
-            _val505 = iprot.readString()
-            self.metadata[_key504] = _val505
+          (_ktype514, _vtype515, _size513 ) = iprot.readMapBegin()
+          for _i517 in xrange(_size513):
+            _key518 = iprot.readI64()
+            _val519 = iprot.readString()
+            self.metadata[_key518] = _val519
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
@@ -11360,9 +11512,9 @@ class GetFileMetadataResult:
     if self.metadata is not None:
       oprot.writeFieldBegin('metadata', TType.MAP, 1)
       oprot.writeMapBegin(TType.I64, TType.STRING, len(self.metadata))
-      for kiter506,viter507 in self.metadata.items():
-        oprot.writeI64(kiter506)
-        oprot.writeString(viter507)
+      for kiter520,viter521 in self.metadata.items():
+        oprot.writeI64(kiter520)
+        oprot.writeString(viter521)
       oprot.writeMapEnd()
       oprot.writeFieldEnd()
     if self.isSupported is not None:
@@ -11423,10 +11575,10 @@ class GetFileMetadataRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.fileIds = []
-          (_etype511, _size508) = iprot.readListBegin()
-          for _i512 in xrange(_size508):
-            _elem513 = iprot.readI64()
-            self.fileIds.append(_elem513)
+          (_etype525, _size522) = iprot.readListBegin()
+          for _i526 in xrange(_size522):
+            _elem527 = iprot.readI64()
+            self.fileIds.append(_elem527)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -11443,8 +11595,8 @@ class GetFileMetadataRequest:
     if self.fileIds is not None:
       oprot.writeFieldBegin('fileIds', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.fileIds))
-      for iter514 in self.fileIds:
-        oprot.writeI64(iter514)
+      for iter528 in self.fileIds:
+        oprot.writeI64(iter528)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -11550,20 +11702,20 @@ class PutFileMetadataRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.fileIds = []
-          (_etype518, _size515) = iprot.readListBegin()
-          for _i519 in xrange(_size515):
-            _elem520 = iprot.readI64()
-            self.fileIds.append(_elem520)
+          (_etype532, _size529) = iprot.readListBegin()
+          for _i533 in xrange(_size529):
+            _elem534 = iprot.readI64()
+            self.fileIds.append(_elem534)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.LIST:
           self.metadata = []
-          (_etype524, _size521) = iprot.readListBegin()
-          for _i525 in xrange(_size521):
-            _elem526 = iprot.readString()
-            self.metadata.append(_elem526)
+          (_etype538, _size535) = iprot.readListBegin()
+          for _i539 in xrange(_size535):
+            _elem540 = iprot.readString()
+            self.metadata.append(_elem540)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -11585,15 +11737,15 @@ class PutFileMetadataRequest:
     if self.fileIds is not None:
       oprot.writeFieldBegin('fileIds', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.fileIds))
-      for iter527 in self.fileIds:
-        oprot.writeI64(iter527)
+      for iter541 in self.fileIds:
+        oprot.writeI64(iter541)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.metadata is not None:
       oprot.writeFieldBegin('metadata', TType.LIST, 2)
       oprot.writeListBegin(TType.STRING, len(self.metadata))
-      for iter528 in self.metadata:
-        oprot.writeString(iter528)
+      for iter542 in self.metadata:
+        oprot.writeString(iter542)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.type is not None:
@@ -11701,10 +11853,10 @@ class ClearFileMetadataRequest:
       if fid == 1:
         if ftype == TType.LIST:
           self.fileIds = []
-          (_etype532, _size529) = iprot.readListBegin()
-          for _i533 in xrange(_size529):
-            _elem534 = iprot.readI64()
-            self.fileIds.append(_elem534)
+          (_etype546, _size543) = iprot.readListBegin()
+          for _i547 in xrange(_size543):
+            _elem548 = iprot.readI64()
+            self.fileIds.append(_elem548)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -11721,8 +11873,8 @@ class ClearFileMetadataRequest:
     if self.fileIds is not None:
       oprot.writeFieldBegin('fileIds', TType.LIST, 1)
       oprot.writeListBegin(TType.I64, len(self.fileIds))
-      for iter535 in self.fileIds:
-        oprot.writeI64(iter535)
+      for iter549 in self.fileIds:
+        oprot.writeI64(iter549)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
@@ -11951,11 +12103,11 @@ class GetAllFunctionsResponse:
       if fid == 1:
         if ftype == TType.LIST:
           self.functions = []
-          (_etype539, _size536) = iprot.readListBegin()
-          for _i540 in xrange(_size536):
-            _elem541 = Function()
-            _elem541.read(iprot)
-            self.functions.append(_elem541)
+          (_etype553, _size550) = iprot.readListBegin()
+          for _i554 in xrange(_size550):
+            _elem555 = Function()
+            _elem555.read(iprot)
+            self.functions.append(_elem555)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -11972,8 +12124,8 @@ class GetAllFunctionsResponse:
     if self.functions is not None:
       oprot.writeFieldBegin('functions', TType.LIST, 1)
       oprot.writeListBegin(TType.STRUCT, len(self.functions))
-      for iter542 in self.functions:
-        iter542.write(oprot)
+      for iter556 in self.functions:
+        iter556.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()

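Aside from the two new request structs above, the rest of the ttypes.py diff is mechanical: inserting AddPrimaryKeyRequest and AddForeignKeyRequest shifts the Thrift compiler's generated temporary counters (_etype*, _size*, _elem*, _key*, _val*, iter*) by 14 in every later class, so the remaining hunks rename identifiers without changing behavior.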
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index d6da518..2aa92d8 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -1409,6 +1409,40 @@ class DropConstraintRequest
   ::Thrift::Struct.generate_accessors self
 end
 
+class AddPrimaryKeyRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  PRIMARYKEYCOLS = 1
+
+  FIELDS = {
+    PRIMARYKEYCOLS => {:type => ::Thrift::Types::LIST, :name => 'primaryKeyCols', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLPrimaryKey}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field primaryKeyCols is unset!') unless @primaryKeyCols
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class AddForeignKeyRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  FOREIGNKEYCOLS = 1
+
+  FIELDS = {
+    FOREIGNKEYCOLS => {:type => ::Thrift::Types::LIST, :name => 'foreignKeyCols', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SQLForeignKey}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field foreignKeyCols is unset!') unless @foreignKeyCols
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class PartitionsByExprResult
   include ::Thrift::Struct, ::Thrift::Struct_Union
   PARTITIONS = 1

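The Ruby types mirror the Python structs: each request declares its single list field, and validate raises a ProtocolException when the required list is unset, matching the Python validate() checks above.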
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
index 722f0f0..51f65c6 100644
--- a/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
+++ b/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
@@ -352,6 +352,38 @@ module ThriftHiveMetastore
       return
     end
 
+    def add_primary_key(req)
+      send_add_primary_key(req)
+      recv_add_primary_key()
+    end
+
+    def send_add_primary_key(req)
+      send_message('add_primary_key', Add_primary_key_args, :req => req)
+    end
+
+    def recv_add_primary_key()
+      result = receive_message(Add_primary_key_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      return
+    end
+
+    def add_foreign_key(req)
+      send_add_foreign_key(req)
+      recv_add_foreign_key()
+    end
+
+    def send_add_foreign_key(req)
+      send_message('add_foreign_key', Add_foreign_key_args, :req => req)
+    end
+
+    def recv_add_foreign_key()
+      result = receive_message(Add_foreign_key_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      return
+    end
+
     def drop_table(dbname, name, deleteData)
       send_drop_table(dbname, name, deleteData)
       recv_drop_table()
@@ -2733,6 +2765,32 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'drop_constraint', seqid)
     end
 
+    def process_add_primary_key(seqid, iprot, oprot)
+      args = read_args(iprot, Add_primary_key_args)
+      result = Add_primary_key_result.new()
+      begin
+        @handler.add_primary_key(args.req)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'add_primary_key', seqid)
+    end
+
+    def process_add_foreign_key(seqid, iprot, oprot)
+      args = read_args(iprot, Add_foreign_key_args)
+      result = Add_foreign_key_result.new()
+      begin
+        @handler.add_foreign_key(args.req)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'add_foreign_key', seqid)
+    end
+
     def process_drop_table(seqid, iprot, oprot)
       args = read_args(iprot, Drop_table_args)
       result = Drop_table_result.new()
@@ -5021,6 +5079,74 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Add_primary_key_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::AddPrimaryKeyRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_primary_key_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_foreign_key_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    REQ = 1
+
+    FIELDS = {
+      REQ => {:type => ::Thrift::Types::STRUCT, :name => 'req', :class => ::AddForeignKeyRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Add_foreign_key_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
   class Drop_table_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
     DBNAME = 1

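The generated Ruby service code follows the usual Thrift client/processor split: on the client side, add_primary_key pairs send_add_primary_key with recv_add_primary_key, which re-raises o1/o2 when the server set them on the result struct; on the server side, process_add_primary_key reads the args, invokes the handler, and maps NoSuchObjectException and MetaException back into the result before writing it out. add_foreign_key is identical in shape.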
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 8d4e8f3..4b92b2a 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -1512,6 +1512,59 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         endFunction("drop_constraint", success, ex, constraintName);
       }
     }
+
+    @Override
+    public void add_primary_key(AddPrimaryKeyRequest req)
+      throws MetaException, InvalidObjectException {
+      List<SQLPrimaryKey> primaryKeyCols = req.getPrimaryKeyCols();
+      String constraintName = (primaryKeyCols != null && primaryKeyCols.size() > 0) ?
+        primaryKeyCols.get(0).getPk_name() : "null";
+      startFunction("add_primary_key", ": " + constraintName);
+      boolean success = false;
+      Exception ex = null;
+      try {
+        getMS().addPrimaryKeys(primaryKeyCols);
+        success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("add_primary_key", success, ex, constraintName);
+      }
+    }
+
+    @Override
+    public void add_foreign_key(AddForeignKeyRequest req)
+      throws MetaException, InvalidObjectException {
+      List<SQLForeignKey> foreignKeyCols = req.getForeignKeyCols();
+      String constraintName = (foreignKeyCols != null && foreignKeyCols.size() > 0) ?
+        foreignKeyCols.get(0).getFk_name() : "null";
+      startFunction("add_foreign_key", ": " + constraintName);
+      boolean success = false;
+      Exception ex = null;
+      try {
+        getMS().addForeignKeys(foreignKeyCols);
+        success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else {
+          throw newMetaException(e);
+        }
+      } finally {
+        endFunction("add_foreign_key", success, ex, constraintName);
+      }
+    }
+
     private boolean is_table_exists(RawStore ms, String dbname, String name)
         throws MetaException {
       return (ms.getTable(dbname, name) != null);

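The Java handlers above follow the standard HMSHandler shape: startFunction/endFunction bracket the call for logging and metrics, the first column's constraint name serves as the logged identifier (falling back to "null" when the list is empty), and any unexpected exception is narrowed to a declared Thrift exception via newMetaException before being rethrown.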
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3a63551..09091b8 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -29,8 +29,10 @@ import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
 import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest;
 import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
+import org.apache.hadoop.hive.metastore.api.AddForeignKeyRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
+import org.apache.hadoop.hive.metastore.api.AddPrimaryKeyRequest;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.CacheFileMetadataRequest;
@@ -772,6 +774,18 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     client.drop_constraint(new DropConstraintRequest(dbName, tableName, constraintName));
   }
 
+  @Override
+  public void addPrimaryKey(List<SQLPrimaryKey> primaryKeyCols) throws
+    NoSuchObjectException, MetaException, TException {
+    client.add_primary_key(new AddPrimaryKeyRequest(primaryKeyCols));
+  }
+
+  @Override
+  public void addForeignKey(List<SQLForeignKey> foreignKeyCols) throws
+    NoSuchObjectException, MetaException, TException {
+    client.add_foreign_key(new AddForeignKeyRequest(foreignKeyCols));
+  }
+
 /**
    * @param type
    * @return true or false

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 68972fb..9f452c1 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -1576,4 +1576,9 @@ public interface IMetaStoreClient {
   void dropConstraint(String dbName, String tableName, String constraintName) throws 
     MetaException, NoSuchObjectException, TException;
 
+  void addPrimaryKey(List<SQLPrimaryKey> primaryKeyCols) throws
+  MetaException, NoSuchObjectException, TException;
+
+  void addForeignKey(List<SQLForeignKey> foreignKeyCols) throws
+  MetaException, NoSuchObjectException, TException;
 }

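The ObjectStore changes that follow wire the same persistence logic to both entry points. The new public addPrimaryKeys/addForeignKeys overloads (their @Override annotations indicate matching declarations on RawStore, not shown in this part of the patch) pass retrieveCD=true, forcing getMTable(db, table, true) to deep-fetch the table's storage descriptor and column descriptor before column indexes are resolved. createTableWithConstraints instead passes retrieveCD=false, since the descriptor is already attached in the still-uncommitted create-table transaction and a deep retrieval would be wasted work.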
http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index fea4fc5..b6d5276 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -911,9 +911,12 @@ public class ObjectStore implements RawStore, Configurable {
     try {
       openTransaction();
       createTable(tbl);
-      addPrimaryKeys(primaryKeys);
-      addForeignKeys(foreignKeys);
-	  success = commitTransaction();
+      // Add primary keys and foreign keys.
+      // We need not do a deep retrieval of the Table Column Descriptor while persisting the PK/FK
+      // since this transaction involving create table is not yet committed.
+      addPrimaryKeys(primaryKeys, false);
+      addForeignKeys(foreignKeys, false);
+      success = commitTransaction();
     } finally {
       if (!success) {
         rollbackTransaction();
@@ -1265,7 +1268,20 @@ public class ObjectStore implements RawStore, Configurable {
     return getTables(dbName, ".*");
   }
 
-  private MTable getMTable(String db, String table) {
+  class AttachedMTableInfo {
+    MTable mtbl;
+    MColumnDescriptor mcd;
+
+    public AttachedMTableInfo() {}
+
+    public AttachedMTableInfo(MTable mtbl, MColumnDescriptor mcd) {
+      this.mtbl = mtbl;
+      this.mcd = mcd;
+    }
+  }
+
+  private AttachedMTableInfo getMTable(String db, String table, boolean retrieveCD) {
+    AttachedMTableInfo nmtbl = new AttachedMTableInfo();
     MTable mtbl = null;
     boolean commited = false;
     Query query = null;
@@ -1278,6 +1294,12 @@ public class ObjectStore implements RawStore, Configurable {
       query.setUnique(true);
       mtbl = (MTable) query.execute(table, db);
       pm.retrieve(mtbl);
+      // Retrieving CD can be expensive and unnecessary, so do it only when required.
+      if (mtbl != null && retrieveCD) {
+        pm.retrieve(mtbl.getSd());
+        pm.retrieveAll(mtbl.getSd().getCD());
+        nmtbl.mcd = mtbl.getSd().getCD();
+      }
       commited = commitTransaction();
     } finally {
       if (!commited) {
@@ -1287,7 +1309,13 @@ public class ObjectStore implements RawStore, Configurable {
         query.closeAll();
       }
     }
-    return mtbl;
+    nmtbl.mtbl = mtbl;
+    return nmtbl;
+  }
+
+  private MTable getMTable(String db, String table) {
+    AttachedMTableInfo nmtbl = getMTable(db, table, false);
+    return nmtbl.mtbl;
   }
 
   @Override
@@ -3302,8 +3330,10 @@ public class ObjectStore implements RawStore, Configurable {
     return sds;
   }
 
-  private int getColumnIndexForTable(MTable mtbl, String col) {
-    List<MFieldSchema> cols = mtbl.getSd().getCD().getCols();
+  private int getColumnIndexFromTableColumns(List<MFieldSchema> cols, String col) {
+    if (cols == null) {
+      return -1;
+    }
     for (int i = 0; i < cols.size(); i++) {
       MFieldSchema mfs = cols.get(i);
       if (mfs.getName().equalsIgnoreCase(col)) {
@@ -3351,33 +3381,47 @@ public class ObjectStore implements RawStore, Configurable {
     throw new MetaException("Error while trying to generate the constraint name for " + ArrayUtils.toString(parameters));
   }
 
+  @Override
+  public void addForeignKeys(
+    List<SQLForeignKey> fks) throws InvalidObjectException, MetaException {
+   addForeignKeys(fks, true);
+  }
+
   private void addForeignKeys(
-    List<SQLForeignKey> fks) throws InvalidObjectException,
+    List<SQLForeignKey> fks, boolean retrieveCD) throws InvalidObjectException,
     MetaException {
     List<MConstraint> mpkfks = new ArrayList<MConstraint>();
     String currentConstraintName = null;
 
     for (int i = 0; i < fks.size(); i++) {
-      MTable parentTable =
-        getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name());
+      AttachedMTableInfo nParentTable = getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name(), retrieveCD);
+      MTable parentTable = nParentTable.mtbl;
       if (parentTable == null) {
         throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name());
       }
-      MTable childTable =
-        getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name());
+
+      AttachedMTableInfo nChildTable = getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(), retrieveCD);
+      MTable childTable = nChildTable.mtbl;
       if (childTable == null) {
         throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name());
       }
+
+      MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
+      List<MFieldSchema> parentCols = parentCD.getCols();
       int parentIntegerIndex =
-        getColumnIndexForTable(parentTable, fks.get(i).getPkcolumn_name());
+        getColumnIndexFromTableColumns(parentCols, fks.get(i).getPkcolumn_name());
       if (parentIntegerIndex == -1) {
         throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name());
       }
+
+      MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
+      List<MFieldSchema> childCols = childCD.getCols();
       int childIntegerIndex =
-        getColumnIndexForTable(childTable, fks.get(i).getFkcolumn_name());
+        getColumnIndexFromTableColumns(childCols, fks.get(i).getFkcolumn_name());
       if (childIntegerIndex == -1) {
-        throw new InvalidObjectException("Child column not found" + fks.get(i).getFkcolumn_name());
+        throw new InvalidObjectException("Child column not found: " + fks.get(i).getFkcolumn_name());
       }
+
       if (fks.get(i).getFk_name() == null) {
         // When there is no explicit foreign key name associated with the constraint and the key is composite,
        // we expect the foreign keys to be sent in order in the input list.
@@ -3407,8 +3451,8 @@ public class ObjectStore implements RawStore, Configurable {
         enableValidateRely,
         parentTable,
         childTable,
-        parentTable.getSd().getCD(),
-        childTable.getSd().getCD(),
+        parentCD,
+        childCD,
         childIntegerIndex,
         parentIntegerIndex
       );
@@ -3417,18 +3461,29 @@ public class ObjectStore implements RawStore, Configurable {
     pm.makePersistentAll(mpkfks);
   }
 
-  private void addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException,
+  @Override
+  public void addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException,
+    MetaException {
+    addPrimaryKeys(pks, true);
+  }
+
+  private void addPrimaryKeys(List<SQLPrimaryKey> pks, boolean retrieveCD) throws InvalidObjectException,
     MetaException {
     List<MConstraint> mpks = new ArrayList<MConstraint>();
     String constraintName = null;
+
     for (int i = 0; i < pks.size(); i++) {
-      MTable parentTable =
-        getMTable(pks.get(i).getTable_db(), pks.get(i).getTable_name());
+      AttachedMTableInfo nParentTable =
+        getMTable(pks.get(i).getTable_db(), pks.get(i).getTable_name(), retrieveCD);
+      MTable parentTable = nParentTable.mtbl;
       if (parentTable == null) {
         throw new InvalidObjectException("Parent table not found: " + pks.get(i).getTable_name());
       }
+
+      MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
       int parentIntegerIndex =
-        getColumnIndexForTable(parentTable, pks.get(i).getColumn_name());
+        getColumnIndexFromTableColumns(parentCD.getCols(), pks.get(i).getColumn_name());
+
       if (parentIntegerIndex == -1) {
         throw new InvalidObjectException("Parent column not found: " + pks.get(i).getColumn_name());
       }
@@ -3445,6 +3500,7 @@ public class ObjectStore implements RawStore, Configurable {
       } else {
         constraintName = pks.get(i).getPk_name();
       }
+
       int enableValidateRely = (pks.get(i).isEnable_cstr() ? 4 : 0) +
       (pks.get(i).isValidate_cstr() ? 2 : 0) + (pks.get(i).isRely_cstr() ? 1 : 0);
       MConstraint mpk = new MConstraint(
@@ -3456,7 +3512,7 @@ public class ObjectStore implements RawStore, Configurable {
         enableValidateRely,
         parentTable,
         null,
-        parentTable.getSd().getCD(),
+        parentCD,
         null,
         null,
         parentIntegerIndex);
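
For reference, the enableValidateRely value persisted with each MConstraint packs the three flags with weights 4/2/1, as in the arithmetic above. A minimal sketch of the encoding and its inverse (the helper class is illustrative, not part of the patch):

    final class ConstraintTraits {
      static int encode(boolean enable, boolean validate, boolean rely) {
        return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
      }
      static boolean enable(int bits)   { return (bits & 4) != 0; }
      static boolean validate(int bits) { return (bits & 2) != 0; }
      static boolean rely(int bits)     { return (bits & 1) != 0; }
    }
    // DISABLE NOVALIDATE RELY   -> encode(false, false, true)  == 1
    // DISABLE NOVALIDATE NORELY -> encode(false, false, false) == 0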

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 07d8119..a6d3f53 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -674,4 +674,8 @@ public interface RawStore extends Configurable {
     List<SQLForeignKey> foreignKeys) throws InvalidObjectException, MetaException;
 
   void dropConstraint(String dbName, String tableName, String constraintName) throws NoSuchObjectException;
+
+  void addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException, MetaException;
+
+  void addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException;
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index e97d4a9..31f0d7b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -2599,4 +2599,16 @@ public class HBaseStore implements RawStore {
     String constraintName) throws NoSuchObjectException {
    // TODO Auto-generated method stub
   }
+
+  @Override
+  public void addPrimaryKeys(List<SQLPrimaryKey> pks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
+
+  @Override
+  public void addForeignKeys(List<SQLForeignKey> fks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 7f4b77e..3152e77 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -845,4 +845,16 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
    String constraintName) throws NoSuchObjectException {
    // TODO Auto-generated method stub
   }
+
+  @Override
+  public void addPrimaryKeys(List<SQLPrimaryKey> pks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
+
+  @Override
+  public void addForeignKeys(List<SQLForeignKey> fks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 25ccced..86a2436 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -861,6 +861,18 @@ public class DummyRawStoreForJdoConnection implements RawStore {
   String constraintName) throws NoSuchObjectException {
     // TODO Auto-generated method stub
   }
+
+  @Override
+  public void addPrimaryKeys(List<SQLPrimaryKey> pks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
+
+  @Override
+  public void addForeignKeys(List<SQLForeignKey> fks)
+    throws InvalidObjectException, MetaException {
+    // TODO Auto-generated method stub
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 0204fcd..76bc545 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -362,6 +362,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       if (alterTbl != null) {
         if (alterTbl.getOp() == AlterTableTypes.DROPCONSTRAINT ) {
           return dropConstraint(db, alterTbl);
+        } else if (alterTbl.getOp() == AlterTableTypes.ADDCONSTRAINT) {
+          return addConstraint(db, alterTbl);
         } else {
           return alterTable(db, alterTbl);
         }
@@ -3631,6 +3633,21 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
      return 0;
    }
 
+  private int addConstraint(Hive db, AlterTableDesc alterTbl)
+      throws SemanticException, HiveException {
+    try {
+      // This is either an alter table add foreign key or add primary key command.
+      if (!alterTbl.getForeignKeyCols().isEmpty()) {
+        db.addForeignKey(alterTbl.getForeignKeyCols());
+      } else if (!alterTbl.getPrimaryKeyCols().isEmpty()) {
+        db.addPrimaryKey(alterTbl.getPrimaryKeyCols());
+      }
+    } catch (NoSuchObjectException e) {
+      throw new HiveException(e);
+    }
+    return 0;
+  }
+
    /**
    * Drop a given table or some partitions. DropTableDesc is currently used for both.
    *
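
The addConstraint dispatch above assumes the AlterTableDesc carries exactly one non-empty key list. Mapping from statement to code path, using statements of the kind exercised by the new q-tests further below:

    alter table table1 add constraint pk4 primary key (b) disable novalidate rely;
        -> getPrimaryKeyCols() is non-empty, so db.addPrimaryKey(...) runs
    alter table table2 add constraint fk1 foreign key (b) references table1(a) disable novalidate;
        -> getForeignKeyCols() is non-empty, so db.addForeignKey(...) runs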

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
index a1f9f18..edf5d85 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.metadata;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.List;
+import java.util.TreeMap;
 
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 
@@ -64,7 +64,7 @@ public class ForeignKeyInfo implements Serializable {
   public ForeignKeyInfo(List<SQLForeignKey> fks, String childTableName, String childDatabaseName) {
     this.childTableName = childTableName;
     this.childDatabaseName = childDatabaseName;
-    foreignKeys = new HashMap<String, List<ForeignKeyCol>>();
+    foreignKeys = new TreeMap<String, List<ForeignKeyCol>>();
     if (fks == null) {
       return;
     }
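
The HashMap-to-TreeMap switch makes iteration over constraint names deterministic (sorted by name), which presumably keeps DESCRIBE output and q-test results stable across JVM versions. A minimal self-contained illustration (class and values are made up):

    import java.util.Arrays;
    import java.util.Map;
    import java.util.TreeMap;

    public class TreeMapOrderDemo {
      public static void main(String[] args) {
        Map<String, Object> fks = new TreeMap<String, Object>();
        fks.put("fk2", Arrays.asList("b"));
        fks.put("fk1", Arrays.asList("a"));
        // Always prints [fk1, fk2]; a HashMap makes no ordering promise.
        System.out.println(fks.keySet());
      }
    }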

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 981b961..26f61c5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -3656,4 +3656,21 @@ private void constructOneLBLocationMap(FileStatus fSta,
     }
   }
 
+  public void addPrimaryKey(List<SQLPrimaryKey> primaryKeyCols)
+    throws HiveException, NoSuchObjectException {
+    try {
+      getMSC().addPrimaryKey(primaryKeyCols);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  public void addForeignKey(List<SQLForeignKey> foreignKeyCols)
+    throws HiveException, NoSuchObjectException {
+    try {
+      getMSC().addForeignKey(foreignKeyCols);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
 };
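
A hypothetical caller-side sketch of the new wrappers; the database, table, column and constraint names are made up, and the SQLPrimaryKey constructor is the eight-argument form also used by processPrimaryKeys below:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

    Hive db = Hive.get(conf);  // assumes a HiveConf named conf is in scope
    List<SQLPrimaryKey> pk = new ArrayList<SQLPrimaryKey>();
    // SQLPrimaryKey(tableDb, tableName, columnName, keySeq, pkName, enable, validate, rely)
    pk.add(new SQLPrimaryKey("default", "table1", "a", 1, "pk1", false, false, true));
    db.addPrimaryKey(pk);  // delegates to getMSC().addPrimaryKey(pk)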

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 7fcbd6a..4a9db9e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -706,13 +706,50 @@ public abstract class BaseSemanticAnalyzer {
   }
 
   /**
+   * Process the primary keys from the AST nodes and populate the SQLPrimaryKey list.
+   * As of now, this is used by the 'alter table add constraint' command. We expect the
+   * constraint name to be user-specified.
+   * @param parent Parent of the primary key token node
+   * @param child Child of the primary key token node containing the primary key columns details
+   * @param primaryKeys SQLPrimaryKey list to be populated by this function
+   * @throws SemanticException
+   */
+  protected static void processPrimaryKeys(ASTNode parent, ASTNode child, List<SQLPrimaryKey> primaryKeys)
+    throws SemanticException {
+    int relyIndex = 4;
+    int cnt = 1;
+    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
+    for (int j = 0; j < child.getChild(1).getChildCount(); j++) {
+      Tree grandChild = child.getChild(1).getChild(j);
+      boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
+      boolean enable = child.getChild(relyIndex + 1).getType() == HiveParser.TOK_ENABLE;
+      boolean validate = child.getChild(relyIndex + 2).getType() == HiveParser.TOK_VALIDATE;
+      if (enable) {
+        throw new SemanticException(
+          ErrorMsg.INVALID_PK_SYNTAX.getMsg(" ENABLE feature not supported yet"));
+      }
+      if (validate) {
+        throw new SemanticException(
+          ErrorMsg.INVALID_PK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
+      }
+      primaryKeys.add(
+        new SQLPrimaryKey(
+          qualifiedTabName[0], qualifiedTabName[1],
+          unescapeIdentifier(grandChild.getText().toLowerCase()),
+          cnt++,
+          unescapeIdentifier(child.getChild(3).getText().toLowerCase()), false, false,
+          rely));
+    }
+  }
+
+  /**
    * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list
    * @param parent  Parent of the foreign key token node
    * @param child Foreign Key token node
    * @param foreignKeys SQLForeignKey list
    * @throws SemanticException
    */
-  private static void processForeignKeys(
+  protected static void processForeignKeys(
     ASTNode parent, ASTNode child, List<SQLForeignKey> foreignKeys) throws SemanticException {
     String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
     // The ANTLR grammar looks like :
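
For orientation, the child layout the new processPrimaryKeys assumes, reconstructed from the indices it reads; the sample statement is illustrative, and the rely flag is compared against TOK_VALIDATE, which appears to be the token the grammar reuses when rewriting the rely specification:

    // ALTER TABLE t ADD CONSTRAINT pk1 PRIMARY KEY (a, b) DISABLE NOVALIDATE RELY
    // child.getChild(1)             -> column list; one SQLPrimaryKey per column (key_seq 1, 2, ...)
    // child.getChild(3)             -> constraint name token ("pk1")
    // child.getChild(relyIndex)     -> rely spec      (relyIndex == 4)
    // child.getChild(relyIndex + 1) -> enable spec;   ENABLE is rejected for now
    // child.getChild(relyIndex + 2) -> validate spec; VALIDATE is rejected for now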

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0a892e8..0d735b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -40,6 +40,8 @@ import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -70,6 +72,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.PKInfo;
 import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils;
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
@@ -321,8 +324,10 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         analyzeAlterTableCompact(ast, tableName, partSpec);
       } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS){
         analyzeAlterTableUpdateStats(ast, tableName, partSpec);
-      }  else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) {
+      } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) {
         analyzeAlterTableDropConstraint(ast, tableName);
+      } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) {
+        analyzeAlterTableAddConstraint(ast, tableName);
       }
       break;
     }
@@ -1754,6 +1759,24 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         alterTblDesc), conf));
   }
 
+  private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
+    throws SemanticException {
+    ASTNode parent = (ASTNode) ast.getParent();
+    ASTNode child = (ASTNode) ast.getChild(0);
+    List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
+    List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
+
+    if (child.getToken().getType() == HiveParser.TOK_PRIMARY_KEY) {
+      BaseSemanticAnalyzer.processPrimaryKeys(parent, child, primaryKeys);
+    } else if (child.getToken().getType() == HiveParser.TOK_FOREIGN_KEY) {
+      BaseSemanticAnalyzer.processForeignKeys(parent, child, foreignKeys);
+    }
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys);
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        alterTblDesc), conf));
+  }
+
   static HashMap<String, String> getProps(ASTNode prop) {
     // Must be deterministic order map for consistent q-test output across Java versions
     HashMap<String, String> mapProp = new LinkedHashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 685ab6a..e0a84c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -180,6 +180,7 @@ TOK_ALTERTABLE_BUCKETS;
 TOK_ALTERTABLE_CLUSTER_SORT;
 TOK_ALTERTABLE_COMPACT;
 TOK_ALTERTABLE_DROPCONSTRAINT;
+TOK_ALTERTABLE_ADDCONSTRAINT;
 TOK_ALTERINDEX_REBUILD;
 TOK_ALTERINDEX_PROPERTIES;
 TOK_MSCK;
@@ -1046,6 +1047,7 @@ alterTableStatementSuffix
     | alterStatementSuffixExchangePartition
     | alterStatementPartitionKeyType
     | alterStatementSuffixDropConstraint
+    | alterStatementSuffixAddConstraint
     | partitionSpec? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix partitionSpec?
     ;
 
@@ -1135,6 +1137,14 @@ alterStatementSuffixAddCol
     ->                 ^(TOK_ALTERTABLE_REPLACECOLS columnNameTypeList restrictOrCascade?)
     ;
 
+alterStatementSuffixAddConstraint
+@init { pushMsg("add constraint statement", state); }
+@after { popMsg(state); }
+   :  KW_ADD (fk=foreignKeyWithName | primaryKeyWithName)
+   -> {fk != null}? ^(TOK_ALTERTABLE_ADDCONSTRAINT foreignKeyWithName)
+   ->               ^(TOK_ALTERTABLE_ADDCONSTRAINT primaryKeyWithName)
+   ;
+
 alterStatementSuffixDropConstraint
 @init { pushMsg("drop constraint statement", state); }
 @after { popMsg(state); }

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 23a7f6e..efb0ef0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -63,6 +63,7 @@ public final class SemanticAnalyzerFactory {
     commandType.put(HiveParser.TOK_ALTERTABLE_DROPPROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
     commandType.put(HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION, HiveOperation.ALTERTABLE_EXCHANGEPARTITION);
     commandType.put(HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT, HiveOperation.ALTERTABLE_DROPCONSTRAINT);
+    commandType.put(HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT, HiveOperation.ALTERTABLE_ADDCONSTRAINT);
     commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES);
     commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
     commandType.put(HiveParser.TOK_SHOWCOLUMNS, HiveOperation.SHOWCOLUMNS);
@@ -198,6 +199,7 @@ public final class SemanticAnalyzerFactory {
           case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
           case HiveParser.TOK_ALTERTABLE_SKEWED:
           case HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT:
+          case HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT:
           queryState.setCommandType(commandType.get(child.getType()));
           return new DDLSemanticAnalyzer(queryState);
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index 38d8d5a..b83c16d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.plan;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
@@ -56,7 +58,7 @@ public class AlterTableDesc extends DDLDesc implements Serializable {
     DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"),
     ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"),
     ALTERPARTITION("alter partition"), COMPACT("compact"),
-    TRUNCATE("truncate"), MERGEFILES("merge files"), DROPCONSTRAINT("drop constraint");
+    TRUNCATE("truncate"), MERGEFILES("merge files"), DROPCONSTRAINT("drop constraint"), ADDCONSTRAINT("add constraint");
     ;
 
     private final String name;
@@ -117,6 +119,8 @@ public class AlterTableDesc extends DDLDesc implements Serializable {
   boolean isCascade = false;
   EnvironmentContext environmentContext;
   String dropConstraintName;
+  List<SQLPrimaryKey> primaryKeyCols;
+  List<SQLForeignKey> foreignKeyCols;
 
   public AlterTableDesc() {
   }
@@ -270,6 +274,13 @@ public class AlterTableDesc extends DDLDesc implements Serializable {
     op = AlterTableTypes.DROPCONSTRAINT;
   }
 
+  public AlterTableDesc(String tableName, List<SQLPrimaryKey> primaryKeyCols, List<SQLForeignKey> foreignKeyCols) {
+    this.oldName = tableName;
+    this.primaryKeyCols = primaryKeyCols;
+    this.foreignKeyCols = foreignKeyCols;
+    op = AlterTableTypes.ADDCONSTRAINT;
+  }
+
   @Explain(displayName = "new columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getNewColsString() {
     return Utilities.getFieldSchemaString(getNewCols());
@@ -415,6 +426,36 @@ public class AlterTableDesc extends DDLDesc implements Serializable {
   }
 
   /**
+   * @param primaryKeyCols
+   *          the primary key cols to set
+   */
+  public void setPrimaryKeyCols(List<SQLPrimaryKey> primaryKeyCols) {
+    this.primaryKeyCols = primaryKeyCols;
+  }
+
+  /**
+   * @return the primary key cols
+   */
+  public List<SQLPrimaryKey> getPrimaryKeyCols() {
+    return primaryKeyCols;
+  }
+
+  /**
+   * @param foreignKeyCols
+   *          the foreign key cols to set
+   */
+  public void setForeignKeyCols(List<SQLForeignKey> foreignKeyCols) {
+    this.foreignKeyCols = foreignKeyCols;
+  }
+
+  /**
+   * @return the foreign key cols
+   */
+  public List<SQLForeignKey> getForeignKeyCols() {
+    return foreignKeyCols;
+  }
+
+  /**
    * @return the drop constraint name of the table
    */
   @Explain(displayName = "drop constraint name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index c6044b9..c2895dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -117,6 +117,8 @@ public enum HiveOperation {
   ALTERTABLE_EXCHANGEPARTITION("ALTERTABLE_EXCHANGEPARTITION", null, null),
   ALTERTABLE_DROPCONSTRAINT("ALTERTABLE_DROPCONSTRAINT",
       new Privilege[]{Privilege.ALTER_METADATA}, null),
+  ALTERTABLE_ADDCONSTRAINT("ALTERTABLE_ADDCONSTRAINT",
+      new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null),
   ALTERVIEW_AS("ALTERVIEW_AS", new Privilege[] {Privilege.ALTER_METADATA}, null),
   ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA}),

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index 810da48..c507f67 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -53,6 +53,7 @@ public enum HiveOperationType {
   ALTERTABLE_SERIALIZER,
   ALTERTABLE_PARTCOLTYPE,
   ALTERTABLE_DROPCONSTRAINT,
+  ALTERTABLE_ADDCONSTRAINT,
   ALTERPARTITION_SERIALIZER,
   ALTERTABLE_SERDEPROPERTIES,
   ALTERPARTITION_SERDEPROPERTIES,

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 3f138fb..21ae8fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -233,6 +233,8 @@ public class Operation2Privilege {
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_DROPCONSTRAINT, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_ADDCONSTRAINT, PrivRequirement.newIOPrivRequirement
+(OWNER_PRIV_AR, OWNER_PRIV_AR));
 
     //table ownership for create/drop/alter index
     op2Priv.put(HiveOperationType.CREATEINDEX, PrivRequirement.newIOPrivRequirement

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/test/queries/clientnegative/alter_table_constraint_duplicate_pk.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_table_constraint_duplicate_pk.q b/ql/src/test/queries/clientnegative/alter_table_constraint_duplicate_pk.q
new file mode 100644
index 0000000..f77eb29
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_constraint_duplicate_pk.q
@@ -0,0 +1,2 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable novalidate);
+alter table table1 add constraint pk4 primary key (b) disable novalidate rely;

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col1.q b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col1.q
new file mode 100644
index 0000000..e12808d
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col1.q
@@ -0,0 +1,3 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable novalidate);
+CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable novalidate rely);
+alter table table2 add constraint fk1 foreign key (c) references table1(a) disable novalidate;

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col2.q b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col2.q
new file mode 100644
index 0000000..97703de
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_col2.q
@@ -0,0 +1,3 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable novalidate);
+CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable novalidate rely);
+alter table table2 add constraint fk1 foreign key (b) references table1(c) disable novalidate;

http://git-wip-us.apache.org/repos/asf/hive/blob/b36f6a3a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_tbl1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_tbl1.q b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_tbl1.q
new file mode 100644
index 0000000..dcd7839
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_fk_tbl1.q
@@ -0,0 +1,3 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable novalidate);
+CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable novalidate rely);
+alter table table3 add constraint fk1 foreign key (c) references table1(a) disable novalidate;