Posted to commits@hive.apache.org by ha...@apache.org on 2016/05/10 09:31:58 UTC

[2/2] hive git commit: HIVE-13598: Describe extended table should show the primary keys/foreign keys associated with the table (Hari Subramaniyan, reviewed by Ashutosh Chauhan)

HIVE-13598: Describe extended table should show the primary keys/foreign keys associated with the table (Hari Subramaniyan, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/882a7f00
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/882a7f00
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/882a7f00

Branch: refs/heads/master
Commit: 882a7f0005d95800ed5657f8a7606301d22cb228
Parents: adca687
Author: Hari Subramaniyan <ha...@apache.org>
Authored: Tue May 10 02:31:40 2016 -0700
Committer: Hari Subramaniyan <ha...@apache.org>
Committed: Tue May 10 02:31:40 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   4 +-
 metastore/if/hive_metastore.thrift              |   8 +-
 .../upgrade/derby/034-HIVE-13076.derby.sql      |   2 +-
 .../upgrade/derby/hive-schema-2.1.0.derby.sql   |   2 +-
 .../upgrade/mssql/019-HIVE-13076.mssql.sql      |   2 +
 .../upgrade/mssql/hive-schema-2.1.0.mssql.sql   |   2 +
 .../upgrade/mysql/034-HIVE-13076.mysql.sql      |   2 +
 .../upgrade/mysql/hive-schema-2.1.0.mysql.sql   |   2 +
 .../upgrade/oracle/034-HIVE-13076.oracle.sql    |   2 +
 .../upgrade/oracle/hive-schema-2.1.0.oracle.sql |   2 +
 .../postgres/033-HIVE-13076.postgres.sql        |   2 +
 .../postgres/hive-schema-2.1.0.postgres.sql     |   2 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  23 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   9 +
 .../hive/metastore/api/ForeignKeysRequest.java  |  79 +-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   8 -
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   4 -
 .../hive/metastore/MetaStoreDirectSql.java      |  24 +-
 .../hadoop/hive/metastore/ObjectStore.java      |  79 +-
 .../hive/metastore/model/MConstraint.java       |  43 +-
 metastore/src/model/package.jdo                 |   6 +
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  11 +-
 .../hadoop/hive/ql/metadata/ForeignKeyInfo.java | 136 +++
 .../apache/hadoop/hive/ql/metadata/Hive.java    |  36 +
 .../hadoop/hive/ql/metadata/PrimaryKeyInfo.java | 107 +++
 .../formatting/JsonMetaDataFormatter.java       |  10 +-
 .../formatting/MetaDataFormatUtils.java         |  67 ++
 .../metadata/formatting/MetaDataFormatter.java  |   6 +-
 .../formatting/TextMetaDataFormatter.java       |  23 +-
 .../clientpositive/create_with_constraints.q    |  40 +
 .../create_with_constraints.q.out               | 861 +++++++++++++++++++
 31 files changed, 1476 insertions(+), 128 deletions(-)
----------------------------------------------------------------------
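
The user-visible effect of the patch is that DESCRIBE EXTENDED / DESCRIBE FORMATTED now surface primary-key and foreign-key constraints, which DDLTask fetches through two new methods on ql.metadata.Hive (both added further down in this diff). A minimal sketch of that call path, with illustrative database/table names and the usual session/metastore configuration assumed:

    import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;

    public class DescribeConstraintsSketch {
      public static void main(String[] args) throws HiveException {
        Hive db = Hive.get();  // metastore handle for the current session (assumed configured)
        PrimaryKeyInfo pk = db.getPrimaryKeys("default", "orders");
        ForeignKeyInfo fk = db.getForeignKeys("default", "orders");
        // DESCRIBE EXTENDED/FORMATTED appends these structures to the table description.
        System.out.println(pk);  // e.g. Primary Key for default.orders:[...], Constraint Name: ...
        System.out.println(fk);  // e.g. Foreign Keys for default.orders:[ {Constraint Name: ..., ...}]
      }
    }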


http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 9863d6c..9e3aefb 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1468,7 +1468,9 @@ public class QTestUtil {
       ".*Input:.*/data/files/.*",
       ".*Output:.*/data/files/.*",
       ".*total number of created files now is.*",
-      ".*.hive-staging.*"
+      ".*.hive-staging.*",
+      "pk_-?[0-9]*_[0-9]*_[0-9]*",
+      "fk_-?[0-9]*_[0-9]*_[0-9]*"
   });
 
   private final Pattern[] partialReservedPlanMask = toPattern(new String[] {
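
Constraint names are auto-generated when the user does not supply one, so they differ from run to run; the two masks added above normalize them before query output is compared with the checked-in .q.out files. A small sketch of what the patterns match (the digits are illustrative, not taken from this commit):

    import java.util.regex.Pattern;

    public class ConstraintNameMaskSketch {
      public static void main(String[] args) {
        Pattern pkMask = Pattern.compile("pk_-?[0-9]*_[0-9]*_[0-9]*");
        Pattern fkMask = Pattern.compile("fk_-?[0-9]*_[0-9]*_[0-9]*");
        // Generated names have the shape pk_<number>_<number>_<number>; the first number may be negative.
        System.out.println(pkMask.matcher("pk_-2094893772_1462872700000_0").find()); // true
        System.out.println(fkMask.matcher("fk_1388244251_1462872700001_0").find());  // true
      }
    }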

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/if/hive_metastore.thrift
----------------------------------------------------------------------
diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift
index 44dc824..2eac836 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -477,10 +477,10 @@ struct PrimaryKeysResponse {
 }
 
 struct ForeignKeysRequest {
-  1: required string parent_db_name,
-  2: required string parent_tbl_name,
-  3: required string foreign_db_name,
-  4: required string foreign_tbl_name
+  1: string parent_db_name,
+  2: string parent_tbl_name,
+  3: string foreign_db_name,
+  4: string foreign_tbl_name
 }
 
 struct ForeignKeysResponse {
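
Relaxing the four fields from required to optional lets callers filter by only one side of the relationship; the new Hive.getForeignKeys() later in this patch passes nulls for the parent side to fetch every foreign key defined on a child table. A sketch using the generated Java class (table names are illustrative):

    import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;

    public class ForeignKeysRequestSketch {
      public static void main(String[] args) {
        // Parent side intentionally unset: "all foreign keys whose child table is default.orders".
        ForeignKeysRequest req = new ForeignKeysRequest(null, null, "default", "orders");
        System.out.println(req.isSetParent_db_name());   // false - no longer a protocol error
        System.out.println(req.isSetForeign_tbl_name()); // true
      }
    }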

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql b/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql
index b062c56..acf93ef 100644
--- a/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql
+++ b/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql
@@ -1,3 +1,3 @@
-CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL,  "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL);
+CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_INTEGER_IDX" INTEGER NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL,  "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL);
 ALTER TABLE "APP"."KEY_CONSTRAINTS" ADD CONSTRAINT "CONSTRAINTS_PK" PRIMARY KEY ("CONSTRAINT_NAME", "POSITION");
 CREATE INDEX "APP"."CONSTRAINTS_PARENT_TBL_ID_INDEX" ON "APP"."KEY_CONSTRAINTS"("PARENT_TBL_ID");

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql b/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql
index e5d3e89..43fd4a9 100644
--- a/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql
+++ b/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql
@@ -106,7 +106,7 @@ CREATE TABLE "APP"."NOTIFICATION_LOG" ("NL_ID" BIGINT NOT NULL, "DB_NAME" VARCHA
 
 CREATE TABLE "APP"."NOTIFICATION_SEQUENCE" ("NNI_ID" BIGINT NOT NULL, "NEXT_EVENT_ID" BIGINT NOT NULL);
 
-CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL,  "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL);
+CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER NOT NULL, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_INTEGER_IDX" INTEGER, "PARENT_TBL_ID" BIGINT NOT NULL,  "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL);
 
 ALTER TABLE "APP"."KEY_CONSTRAINTS" ADD CONSTRAINT "CONSTRAINTS_PK" PRIMARY KEY ("CONSTRAINT_NAME", "POSITION");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql b/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql
index 00ddb73..7fce333 100644
--- a/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql
@@ -1,8 +1,10 @@
 CREATE TABLE KEY_CONSTRAINTS
 (
   CHILD_CD_ID BIGINT,
+  CHILD_INTEGER_IDX INT,
   CHILD_TBL_ID BIGINT,
   PARENT_CD_ID BIGINT NOT NULL,
+  PARENT_INTEGER_IDX INT NOT NULL,
   PARENT_TBL_ID BIGINT NOT NULL,
   POSITION INT NOT NULL,
   CONSTRAINT_NAME VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql b/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql
index 4f0fdd6..5d90cfc 100644
--- a/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql
+++ b/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql
@@ -984,8 +984,10 @@ CREATE TABLE AUX_TABLE (
 CREATE TABLE KEY_CONSTRAINTS
 (
   CHILD_CD_ID BIGINT,
+  CHILD_INTEGER_IDX INT,
   CHILD_TBL_ID BIGINT,
   PARENT_CD_ID BIGINT NOT NULL,
+  PARENT_INTEGER_IDX INT NOT NULL,
   PARENT_TBL_ID BIGINT NOT NULL,
   POSITION INT NOT NULL,
   CONSTRAINT_NAME VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql b/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql
index c9a5e1d..d5e7213 100644
--- a/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql
@@ -1,8 +1,10 @@
 CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
 (
   `CHILD_CD_ID` BIGINT,
+  `CHILD_INTEGER_IDX` INT(11),
   `CHILD_TBL_ID` BIGINT,
   `PARENT_CD_ID` BIGINT NOT NULL,
+  `PARENT_INTEGER_IDX` INT(11) NOT NULL,
   `PARENT_TBL_ID` BIGINT NOT NULL,
   `POSITION` BIGINT NOT NULL,
   `CONSTRAINT_NAME` VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql b/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql
index 5c03b65..81fd991 100644
--- a/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql
+++ b/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql
@@ -811,8 +811,10 @@ CREATE TABLE IF NOT EXISTS `NOTIFICATION_SEQUENCE`
 CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
 (
   `CHILD_CD_ID` BIGINT,
+  `CHILD_INTEGER_IDX` INT(11),
   `CHILD_TBL_ID` BIGINT,
   `PARENT_CD_ID` BIGINT NOT NULL,
+  `PARENT_INTEGER_IDX` INT(11) NOT NULL,
   `PARENT_TBL_ID` BIGINT NOT NULL,
   `POSITION` BIGINT NOT NULL,
   `CONSTRAINT_NAME` VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql b/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql
index baf855c..5bbd197 100644
--- a/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql
@@ -1,8 +1,10 @@
 CREATE TABLE IF NOT EXISTS  KEY_CONSTRAINTS
 (
   CHILD_CD_ID NUMBER,
+  CHILD_INTEGER_IDX NUMBER,
   CHILD_TBL_ID NUMBER,
   PARENT_CD_ID NUMBER NOT NULL,
+  PARENT_INTEGER_IDX NUMBER NOT NULL,
   PARENT_TBL_ID NUMBER NOT NULL,
   POSITION NUMBER NOT NULL,
   CONSTRAINT_NAME VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql b/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql
index 8420661..f3dcccd 100644
--- a/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql
+++ b/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql
@@ -780,8 +780,10 @@ CREATE INDEX FUNC_RU_N49 ON FUNC_RU (FUNC_ID);
 CREATE TABLE KEY_CONSTRAINTS
 (
   CHILD_CD_ID NUMBER,
+  CHILD_INTEGER_IDX NUMBER,
   CHILD_TBL_ID NUMBER,
   PARENT_CD_ID NUMBER NOT NULL,
+  PARENT_INTEGER_IDX NUMBER NOT NULL,
   PARENT_TBL_ID NUMBER NOT NULL,
   POSITION NUMBER NOT NULL,
   CONSTRAINT_NAME VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql b/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql
index ec1fb48..9ee7c11 100644
--- a/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql
@@ -1,8 +1,10 @@
 CREATE TABLE IF NOT EXISTS  "KEY_CONSTRAINTS"
 (
   "CHILD_CD_ID" BIGINT,
+  "CHILD_INTEGER_IDX" BIGINT,
   "CHILD_TBL_ID" BIGINT,
   "PARENT_CD_ID" BIGINT NOT NULL,
+  "PARENT_INTEGER_IDX" BIGINT NOT NULL,
   "PARENT_TBL_ID" BIGINT NOT NULL,
   "POSITION" BIGINT NOT NULL,
   "CONSTRAINT_NAME" VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql b/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql
index f9bf2a3..9992c62 100644
--- a/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql
+++ b/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql
@@ -588,8 +588,10 @@ CREATE TABLE "NOTIFICATION_SEQUENCE"
 CREATE TABLE "KEY_CONSTRAINTS"
 (
   "CHILD_CD_ID" BIGINT,
+  "CHILD_INTEGER_IDX" BIGINT,
   "CHILD_TBL_ID" BIGINT,
   "PARENT_CD_ID" BIGINT NOT NULL,
+  "PARENT_INTEGER_IDX" BIGINT NOT NULL,
   "PARENT_TBL_ID" BIGINT NOT NULL,
   "POSITION" BIGINT NOT NULL,
   "CONSTRAINT_NAME" VARCHAR(400) NOT NULL,

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index f0cd007..d0c24de 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -8770,10 +8770,6 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
 
   using ::apache::thrift::protocol::TProtocolException;
 
-  bool isset_parent_db_name = false;
-  bool isset_parent_tbl_name = false;
-  bool isset_foreign_db_name = false;
-  bool isset_foreign_tbl_name = false;
 
   while (true)
   {
@@ -8786,7 +8782,7 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
       case 1:
         if (ftype == ::apache::thrift::protocol::T_STRING) {
           xfer += iprot->readString(this->parent_db_name);
-          isset_parent_db_name = true;
+          this->__isset.parent_db_name = true;
         } else {
           xfer += iprot->skip(ftype);
         }
@@ -8794,7 +8790,7 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
       case 2:
         if (ftype == ::apache::thrift::protocol::T_STRING) {
           xfer += iprot->readString(this->parent_tbl_name);
-          isset_parent_tbl_name = true;
+          this->__isset.parent_tbl_name = true;
         } else {
           xfer += iprot->skip(ftype);
         }
@@ -8802,7 +8798,7 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
       case 3:
         if (ftype == ::apache::thrift::protocol::T_STRING) {
           xfer += iprot->readString(this->foreign_db_name);
-          isset_foreign_db_name = true;
+          this->__isset.foreign_db_name = true;
         } else {
           xfer += iprot->skip(ftype);
         }
@@ -8810,7 +8806,7 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
       case 4:
         if (ftype == ::apache::thrift::protocol::T_STRING) {
           xfer += iprot->readString(this->foreign_tbl_name);
-          isset_foreign_tbl_name = true;
+          this->__isset.foreign_tbl_name = true;
         } else {
           xfer += iprot->skip(ftype);
         }
@@ -8824,14 +8820,6 @@ uint32_t ForeignKeysRequest::read(::apache::thrift::protocol::TProtocol* iprot)
 
   xfer += iprot->readStructEnd();
 
-  if (!isset_parent_db_name)
-    throw TProtocolException(TProtocolException::INVALID_DATA);
-  if (!isset_parent_tbl_name)
-    throw TProtocolException(TProtocolException::INVALID_DATA);
-  if (!isset_foreign_db_name)
-    throw TProtocolException(TProtocolException::INVALID_DATA);
-  if (!isset_foreign_tbl_name)
-    throw TProtocolException(TProtocolException::INVALID_DATA);
   return xfer;
 }
 
@@ -8867,6 +8855,7 @@ void swap(ForeignKeysRequest &a, ForeignKeysRequest &b) {
   swap(a.parent_tbl_name, b.parent_tbl_name);
   swap(a.foreign_db_name, b.foreign_db_name);
   swap(a.foreign_tbl_name, b.foreign_tbl_name);
+  swap(a.__isset, b.__isset);
 }
 
 ForeignKeysRequest::ForeignKeysRequest(const ForeignKeysRequest& other367) {
@@ -8874,12 +8863,14 @@ ForeignKeysRequest::ForeignKeysRequest(const ForeignKeysRequest& other367) {
   parent_tbl_name = other367.parent_tbl_name;
   foreign_db_name = other367.foreign_db_name;
   foreign_tbl_name = other367.foreign_tbl_name;
+  __isset = other367.__isset;
 }
 ForeignKeysRequest& ForeignKeysRequest::operator=(const ForeignKeysRequest& other368) {
   parent_db_name = other368.parent_db_name;
   parent_tbl_name = other368.parent_tbl_name;
   foreign_db_name = other368.foreign_db_name;
   foreign_tbl_name = other368.foreign_tbl_name;
+  __isset = other368.__isset;
   return *this;
 }
 void ForeignKeysRequest::printTo(std::ostream& out) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index d32cdeb..7afa2be 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -3683,6 +3683,13 @@ inline std::ostream& operator<<(std::ostream& out, const PrimaryKeysResponse& ob
   return out;
 }
 
+typedef struct _ForeignKeysRequest__isset {
+  _ForeignKeysRequest__isset() : parent_db_name(false), parent_tbl_name(false), foreign_db_name(false), foreign_tbl_name(false) {}
+  bool parent_db_name :1;
+  bool parent_tbl_name :1;
+  bool foreign_db_name :1;
+  bool foreign_tbl_name :1;
+} _ForeignKeysRequest__isset;
 
 class ForeignKeysRequest {
  public:
@@ -3698,6 +3705,8 @@ class ForeignKeysRequest {
   std::string foreign_db_name;
   std::string foreign_tbl_name;
 
+  _ForeignKeysRequest__isset __isset;
+
   void __set_parent_db_name(const std::string& val);
 
   void __set_parent_tbl_name(const std::string& val);

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ForeignKeysRequest.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ForeignKeysRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ForeignKeysRequest.java
index 7788780..d6b1cff 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ForeignKeysRequest.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ForeignKeysRequest.java
@@ -125,13 +125,13 @@ public class ForeignKeysRequest implements org.apache.thrift.TBase<ForeignKeysRe
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.PARENT_DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("parent_db_name", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.PARENT_DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("parent_db_name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.PARENT_TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("parent_tbl_name", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.PARENT_TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("parent_tbl_name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.FOREIGN_DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("foreign_db_name", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.FOREIGN_DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("foreign_db_name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.FOREIGN_TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("foreign_tbl_name", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.FOREIGN_TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("foreign_tbl_name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ForeignKeysRequest.class, metaDataMap);
@@ -533,22 +533,6 @@ public class ForeignKeysRequest implements org.apache.thrift.TBase<ForeignKeysRe
 
   public void validate() throws org.apache.thrift.TException {
     // check for required fields
-    if (!isSetParent_db_name()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'parent_db_name' is unset! Struct:" + toString());
-    }
-
-    if (!isSetParent_tbl_name()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'parent_tbl_name' is unset! Struct:" + toString());
-    }
-
-    if (!isSetForeign_db_name()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'foreign_db_name' is unset! Struct:" + toString());
-    }
-
-    if (!isSetForeign_tbl_name()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'foreign_tbl_name' is unset! Struct:" + toString());
-    }
-
     // check for sub-struct validity
   }
 
@@ -668,23 +652,54 @@ public class ForeignKeysRequest implements org.apache.thrift.TBase<ForeignKeysRe
     @Override
     public void write(org.apache.thrift.protocol.TProtocol prot, ForeignKeysRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol oprot = (TTupleProtocol) prot;
-      oprot.writeString(struct.parent_db_name);
-      oprot.writeString(struct.parent_tbl_name);
-      oprot.writeString(struct.foreign_db_name);
-      oprot.writeString(struct.foreign_tbl_name);
+      BitSet optionals = new BitSet();
+      if (struct.isSetParent_db_name()) {
+        optionals.set(0);
+      }
+      if (struct.isSetParent_tbl_name()) {
+        optionals.set(1);
+      }
+      if (struct.isSetForeign_db_name()) {
+        optionals.set(2);
+      }
+      if (struct.isSetForeign_tbl_name()) {
+        optionals.set(3);
+      }
+      oprot.writeBitSet(optionals, 4);
+      if (struct.isSetParent_db_name()) {
+        oprot.writeString(struct.parent_db_name);
+      }
+      if (struct.isSetParent_tbl_name()) {
+        oprot.writeString(struct.parent_tbl_name);
+      }
+      if (struct.isSetForeign_db_name()) {
+        oprot.writeString(struct.foreign_db_name);
+      }
+      if (struct.isSetForeign_tbl_name()) {
+        oprot.writeString(struct.foreign_tbl_name);
+      }
     }
 
     @Override
     public void read(org.apache.thrift.protocol.TProtocol prot, ForeignKeysRequest struct) throws org.apache.thrift.TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
-      struct.parent_db_name = iprot.readString();
-      struct.setParent_db_nameIsSet(true);
-      struct.parent_tbl_name = iprot.readString();
-      struct.setParent_tbl_nameIsSet(true);
-      struct.foreign_db_name = iprot.readString();
-      struct.setForeign_db_nameIsSet(true);
-      struct.foreign_tbl_name = iprot.readString();
-      struct.setForeign_tbl_nameIsSet(true);
+      BitSet incoming = iprot.readBitSet(4);
+      if (incoming.get(0)) {
+        struct.parent_db_name = iprot.readString();
+        struct.setParent_db_nameIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.parent_tbl_name = iprot.readString();
+        struct.setParent_tbl_nameIsSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.foreign_db_name = iprot.readString();
+        struct.setForeign_db_nameIsSet(true);
+      }
+      if (incoming.get(3)) {
+        struct.foreign_tbl_name = iprot.readString();
+        struct.setForeign_tbl_nameIsSet(true);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 0de4f60..06527e3 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -6158,14 +6158,6 @@ class ForeignKeysRequest:
     oprot.writeStructEnd()
 
   def validate(self):
-    if self.parent_db_name is None:
-      raise TProtocol.TProtocolException(message='Required field parent_db_name is unset!')
-    if self.parent_tbl_name is None:
-      raise TProtocol.TProtocolException(message='Required field parent_tbl_name is unset!')
-    if self.foreign_db_name is None:
-      raise TProtocol.TProtocolException(message='Required field foreign_db_name is unset!')
-    if self.foreign_tbl_name is None:
-      raise TProtocol.TProtocolException(message='Required field foreign_tbl_name is unset!')
     return
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index 6e6439b..d6da518 100644
--- a/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -1364,10 +1364,6 @@ class ForeignKeysRequest
   def struct_fields; FIELDS; end
 
   def validate
-    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field parent_db_name is unset!') unless @parent_db_name
-    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field parent_tbl_name is unset!') unless @parent_tbl_name
-    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field foreign_db_name is unset!') unless @foreign_db_name
-    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field foreign_tbl_name is unset!') unless @foreign_tbl_name
   end
 
   ::Thrift::Struct.generate_accessors self

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 744512f..8e0bba6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -1819,23 +1819,27 @@ class MetaStoreDirectSql {
       "SELECT  \"D2\".\"NAME\", \"T2\".\"TBL_NAME\", \"C2\".\"COLUMN_NAME\","
       + "\"DBS\".\"NAME\", \"TBLS\".\"TBL_NAME\", \"COLUMNS_V2\".\"COLUMN_NAME\", "
       + "\"KEY_CONSTRAINTS\".\"POSITION\", \"KEY_CONSTRAINTS\".\"UPDATE_RULE\", \"KEY_CONSTRAINTS\".\"DELETE_RULE\", "
-      + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\""
+      + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\" "
       + " FROM \"TBLS\" "
       + " INNER JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_TBL_ID\" "
       + " INNER JOIN \"KEY_CONSTRAINTS\" \"KEY_CONSTRAINTS2\" ON \"KEY_CONSTRAINTS2\".\"PARENT_TBL_ID\"  = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" "
+      + " AND \"KEY_CONSTRAINTS2\".\"PARENT_CD_ID\"  = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND "
+      + " \"KEY_CONSTRAINTS2\".\"PARENT_INTEGER_IDX\"  = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" "
       + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
       + " INNER JOIN \"TBLS\" \"T2\" ON  \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" = \"T2\".\"TBL_ID\" "
       + " INNER JOIN \"DBS\" \"D2\" ON \"T2\".\"DB_ID\" = \"D2\".\"DB_ID\" "
-      + " INNER JOIN \"COLUMNS_V2\"  ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_CD_ID\" "
-      + " INNER JOIN \"COLUMNS_V2\" \"C2\" ON \"C2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" "
+      + " INNER JOIN \"COLUMNS_V2\"  ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_CD_ID\" AND "
+      + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"CHILD_INTEGER_IDX\" "
+      + " INNER JOIN \"COLUMNS_V2\" \"C2\" ON \"C2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND "
+      + " \"C2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" "
       + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = "
       + MConstraint.FOREIGN_KEY_CONSTRAINT
       + " AND \"KEY_CONSTRAINTS2\".\"CONSTRAINT_TYPE\" = "
-      + MConstraint.PRIMARY_KEY_CONSTRAINT
-      + (foreign_db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND")
-      + (foreign_tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? AND ")
-      + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND ")
-      + (parent_db_name == null ? "" : "\"D2\".\"NAME\" = ?") ;
+      + MConstraint.PRIMARY_KEY_CONSTRAINT + " AND"
+      + (foreign_db_name == null ? "" : " \"DBS\".\"NAME\" = ? AND")
+      + (foreign_tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? AND")
+      + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND")
+      + (parent_db_name == null ? "" : " \"D2\".\"NAME\" = ?") ;
 
     queryText = queryText.trim();
     if (queryText.endsWith("WHERE")) {
@@ -1899,8 +1903,8 @@ class MetaStoreDirectSql {
       + " FROM  \"TBLS\" "
       + " INNER  JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" "
       + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" "
-      + " INNER JOIN \"TBLS\" ON \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" = \"TBLS\".\"TBL_ID\" "
-      + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" "
+      + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND "
+      + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" "
       + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = "+ MConstraint.PRIMARY_KEY_CONSTRAINT + " AND "
       + (db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND")
       + (tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? ") ;
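
Two things change in the direct-SQL path: the joins to COLUMNS_V2 (and between the two KEY_CONSTRAINTS aliases) now also match on the new *_INTEGER_IDX columns, so exactly the constrained column is selected rather than the first column of a shared descriptor, and the optional name filters are assembled so the trailing AND/WHERE trimming works for every combination of null arguments. A reduced sketch of that filter-assembly pattern (query text and trimming are simplified, not the actual metastore code):

    public class OptionalFilterSketch {
      // Append a "col = ?" placeholder only when the caller supplied a value, keep the
      // bind values in the same order, and trim any dangling AND/WHERE at the end.
      static String pkQuery(String dbName, String tblName) {
        String q = "SELECT ... FROM \"KEY_CONSTRAINTS\" ... WHERE \"CONSTRAINT_TYPE\" = 0 AND"
            + (dbName == null ? "" : " \"DBS\".\"NAME\" = ? AND")
            + (tblName == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? AND");
        q = q.trim();
        if (q.endsWith("AND")) {
          q = q.substring(0, q.length() - "AND".length());
        }
        if (q.endsWith("WHERE")) {
          q = q.substring(0, q.length() - "WHERE".length());
        }
        return q;
      }

      public static void main(String[] args) {
        System.out.println(pkQuery("default", "orders")); // both filters appended and bound
        System.out.println(pkQuery(null, null));          // constraint-type filter only
      }
    }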

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 7d64900..fea4fc5 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -3302,15 +3302,15 @@ public class ObjectStore implements RawStore, Configurable {
     return sds;
   }
 
-  private MColumnDescriptor getColumnFromTable(MTable mtbl, String col) {
-   for (MFieldSchema mfs: mtbl.getSd().getCD().getCols()) {
-     if (mfs.getName().equals(col)) {
-       List<MFieldSchema> mfsl = new ArrayList<MFieldSchema>();
-       mfsl.add(mfs);
-       return new MColumnDescriptor(mfsl);
-     }
-   }
-   return null;
+  private int getColumnIndexForTable(MTable mtbl, String col) {
+    List<MFieldSchema> cols = mtbl.getSd().getCD().getCols();
+    for (int i = 0; i < cols.size(); i++) {
+      MFieldSchema mfs = cols.get(i);
+      if (mfs.getName().equalsIgnoreCase(col)) {
+        return i;
+      }
+    }
+    return -1;
   }
 
   private  boolean constraintNameAlreadyExists(String name) {
@@ -3360,22 +3360,22 @@ public class ObjectStore implements RawStore, Configurable {
     for (int i = 0; i < fks.size(); i++) {
       MTable parentTable =
         getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name());
-      MTable childTable =
-        getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name());
-      MColumnDescriptor parentColumn =
-        getColumnFromTable(parentTable, fks.get(i).getPkcolumn_name());
-      MColumnDescriptor childColumn =
-        getColumnFromTable(childTable, fks.get(i).getFkcolumn_name());
       if (parentTable == null) {
         throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name());
       }
+      MTable childTable =
+        getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name());
       if (childTable == null) {
         throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name());
       }
-      if (parentColumn == null) {
+      int parentIntegerIndex =
+        getColumnIndexForTable(parentTable, fks.get(i).getPkcolumn_name());
+      if (parentIntegerIndex == -1) {
         throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name());
       }
-      if (childColumn == null) {
+      int childIntegerIndex =
+        getColumnIndexForTable(childTable, fks.get(i).getFkcolumn_name());
+      if (childIntegerIndex == -1) {
         throw new InvalidObjectException("Child column not found" + fks.get(i).getFkcolumn_name());
       }
       if (fks.get(i).getFk_name() == null) {
@@ -3407,8 +3407,10 @@ public class ObjectStore implements RawStore, Configurable {
         enableValidateRely,
         parentTable,
         childTable,
-        parentColumn,
-        childColumn
+        parentTable.getSd().getCD(),
+        childTable.getSd().getCD(),
+        childIntegerIndex,
+        parentIntegerIndex
       );
       mpkfks.add(mpkfk);
     }
@@ -3422,12 +3424,12 @@ public class ObjectStore implements RawStore, Configurable {
     for (int i = 0; i < pks.size(); i++) {
       MTable parentTable =
         getMTable(pks.get(i).getTable_db(), pks.get(i).getTable_name());
-      MColumnDescriptor parentColumn =
-        getColumnFromTable(parentTable, pks.get(i).getColumn_name());
       if (parentTable == null) {
         throw new InvalidObjectException("Parent table not found: " + pks.get(i).getTable_name());
       }
-      if (parentColumn == null) {
+      int parentIntegerIndex =
+        getColumnIndexForTable(parentTable, pks.get(i).getColumn_name());
+      if (parentIntegerIndex == -1) {
         throw new InvalidObjectException("Parent column not found: " + pks.get(i).getColumn_name());
       }
       if (getPrimaryKeyConstraintName(
@@ -3454,8 +3456,10 @@ public class ObjectStore implements RawStore, Configurable {
         enableValidateRely,
         parentTable,
         null,
-        parentColumn,
-        null);
+        parentTable.getSd().getCD(),
+        null,
+        null,
+        parentIntegerIndex);
       mpks.add(mpk);
     }
     pm.makePersistentAll(mpks);
@@ -8174,7 +8178,7 @@ public class ObjectStore implements RawStore, Configurable {
         boolean rely = (enableValidateRely & 1) != 0;
         primaryKeys.add(new SQLPrimaryKey(db_name,
          tbl_name,
-         currPK.getParentColumn().getCols().get(0).getName(),
+         currPK.getParentColumn().getCols().get(currPK.getParentIntegerIndex()).getName(),
          currPK.getPosition(),
          currPK.getConstraintName(), enable, validate, rely));
       }
@@ -8260,11 +8264,11 @@ public class ObjectStore implements RawStore, Configurable {
     Map<String, String> tblToConstraint = new HashMap<String, String>();
     try {
       openTransaction();
-      String queryText = (parent_tbl_name != null ? "parentTable.tableName == parent_tbl_name &&" : "")
-        + (parent_db_name != null ? "parentTable.database.name == parent_db_name &&" : "")
-        + (foreign_tbl_name != null ? "childTable.tableName == foreign_tbl_name &&" : "")
-        + (parent_db_name != null ? "childTable.database.name == foreign_db_name &&" : "")
-        + "constraintType == MConstraint.FOREIGN_KEY_CONSTRAINT";
+      String queryText = (parent_tbl_name != null ? "parentTable.tableName == parent_tbl_name && " : "")
+        + (parent_db_name != null ? " parentTable.database.name == parent_db_name && " : "")
+        + (foreign_tbl_name != null ? " childTable.tableName == foreign_tbl_name && " : "")
+        + (foreign_db_name != null ? " childTable.database.name == foreign_db_name && " : "")
+        + " constraintType == MConstraint.FOREIGN_KEY_CONSTRAINT";
       queryText = queryText.trim();
       query = pm.newQuery(MConstraint.class, queryText);
       String paramText = (parent_tbl_name == null ? "" : "java.lang.String parent_tbl_name,")
@@ -8286,13 +8290,20 @@ public class ObjectStore implements RawStore, Configurable {
       if (foreign_tbl_name != null) {
         params.add(foreign_tbl_name);
       }
-      if (parent_db_name != null) {
+      if (foreign_db_name != null) {
         params.add(foreign_db_name);
       }
       if (params.size() == 0) {
         constraints = (Collection<?>) query.execute();
+      } else if (params.size() ==1) {
+        constraints = (Collection<?>) query.execute(params.get(0));
+      } else if (params.size() == 2) {
+        constraints = (Collection<?>) query.execute(params.get(0), params.get(1));
+      } else if (params.size() == 3) {
+        constraints = (Collection<?>) query.execute(params.get(0), params.get(1), params.get(2));
       } else {
-        constraints = (Collection<?>) query.executeWithArray(params);
+        constraints = (Collection<?>) query.executeWithArray(params.get(0), params.get(1),
+          params.get(2), params.get(3));
       }
       pm.retrieveAll(constraints);
       foreignKeys = new ArrayList<SQLForeignKey>();
@@ -8316,10 +8327,10 @@ public class ObjectStore implements RawStore, Configurable {
         foreignKeys.add(new SQLForeignKey(
           currPKFK.getParentTable().getDatabase().getName(),
           currPKFK.getParentTable().getDatabase().getName(),
-          currPKFK.getParentColumn().getCols().get(0).getName(),
+          currPKFK.getParentColumn().getCols().get(currPKFK.getParentIntegerIndex()).getName(),
           currPKFK.getChildTable().getDatabase().getName(),
           currPKFK.getChildTable().getTableName(),
-          currPKFK.getChildColumn().getCols().get(0).getName(),
+          currPKFK.getChildColumn().getCols().get(currPKFK.getChildIntegerIndex()).getName(),
           currPKFK.getPosition(),
           currPKFK.getUpdateRule(),
           currPKFK.getDeleteRule(),
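
Instead of materialising a throwaway single-column MColumnDescriptor per constraint column, the constraint row now references the table's existing descriptor plus an ordinal, which getColumnIndexForTable() resolves with a case-insensitive scan (returning -1 when the column is missing, which the callers turn into InvalidObjectException); the JDOQL path likewise gains the foreign_db_name filter it previously dropped and binds each supplied parameter positionally. The ordinal lookup, reduced to a self-contained sketch with illustrative column names:

    import java.util.Arrays;
    import java.util.List;

    public class ColumnOrdinalSketch {
      // Same shape as ObjectStore.getColumnIndexForTable above: ordinal of a column within
      // its column descriptor, matched case-insensitively; -1 if absent.
      static int indexOf(List<String> cols, String col) {
        for (int i = 0; i < cols.size(); i++) {
          if (cols.get(i).equalsIgnoreCase(col)) {
            return i;
          }
        }
        return -1;
      }

      public static void main(String[] args) {
        List<String> parentCols = Arrays.asList("id", "name", "created_at");
        System.out.println(indexOf(parentCols, "ID"));      // 0 -> stored in PARENT_INTEGER_IDX
        System.out.println(indexOf(parentCols, "missing")); // -1 -> InvalidObjectException upstream
      }
    }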

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java
----------------------------------------------------------------------
diff --git a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java
index 3806e28..5876060 100644
--- a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java
+++ b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java
@@ -13,6 +13,8 @@ public class MConstraint
   MTable childTable;
   MColumnDescriptor parentColumn;
   MColumnDescriptor childColumn;
+  Integer childIntegerIndex;
+  Integer parentIntegerIndex;
   int enableValidateRely;
 
   // 0 - Primary Key
@@ -52,18 +54,19 @@ public class MConstraint
   public MConstraint() {}
 
   public MConstraint(String constraintName, int constraintType, int position, Integer deleteRule, Integer updateRule, int enableRelyValidate, MTable parentTable,
-    MTable childTable, MColumnDescriptor parentColumn,
-    MColumnDescriptor childColumn) {
+    MTable childTable, MColumnDescriptor parentColumn, MColumnDescriptor childColumn, Integer childIntegerIndex, Integer parentIntegerIndex) {
    this.constraintName = constraintName;
    this.constraintType = constraintType;
-   this.parentColumn = parentColumn;
    this.parentTable = parentTable;
-   this.childColumn = childColumn;
    this.childTable = childTable;
+   this.parentColumn = parentColumn;
+   this.childColumn = childColumn;
    this.position = position;
    this.deleteRule = deleteRule;
    this.updateRule = updateRule;
    this.enableValidateRely = enableRelyValidate;
+   this.childIntegerIndex = childIntegerIndex;
+   this.parentIntegerIndex = parentIntegerIndex;
   }
 
   public String getConstraintName() {
@@ -106,6 +109,22 @@ public class MConstraint
     this.enableValidateRely = enableValidateRely;
   }
 
+  public Integer getChildIntegerIndex() {
+    return childIntegerIndex;
+  }
+
+  public void setChildIntegerIndex(Integer childIntegerIndex) {
+    this.childIntegerIndex = childIntegerIndex;
+  }
+
+  public Integer getParentIntegerIndex() {
+    return childIntegerIndex;
+  }
+
+  public void setParentIntegerIndex(Integer parentIntegerIndex) {
+    this.parentIntegerIndex = parentIntegerIndex;
+  }
+
   public Integer getUpdateRule() {
     return updateRule;
   }
@@ -130,19 +149,19 @@ public class MConstraint
     this.parentTable = pt;
   }
 
-  public MColumnDescriptor getChildColumn() {
-    return childColumn;
+  public MColumnDescriptor getParentColumn() {
+    return parentColumn;
   }
 
-  public void setChildColumn(MColumnDescriptor cc) {
-    this.childColumn = cc;
+  public void setParentColumn(MColumnDescriptor name) {
+    this.parentColumn = name;
   }
 
-  public MColumnDescriptor getParentColumn() {
-    return parentColumn;
+  public MColumnDescriptor getChildColumn() {
+    return childColumn;
   }
 
-  public void setParentColumn(MColumnDescriptor pc) {
-    this.parentColumn = pc;
+  public void setChildColumn(MColumnDescriptor name) {
+    this.childColumn = name;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/metastore/src/model/package.jdo
----------------------------------------------------------------------
diff --git a/metastore/src/model/package.jdo b/metastore/src/model/package.jdo
index a58c046..bfd6ddd 100644
--- a/metastore/src/model/package.jdo
+++ b/metastore/src/model/package.jdo
@@ -194,12 +194,18 @@
       <field name="childColumn">
         <column name="CHILD_CD_ID"/>
       </field>
+      <field name="childIntegerIndex">
+        <column name="CHILD_INTEGER_IDX"/>
+      </field>
       <field name="childTable">
         <column name="CHILD_TBL_ID"/>
       </field>
       <field name="parentColumn">
         <column name="PARENT_CD_ID"/>
       </field>
+      <field name="parentIntegerIndex">
+	<column name="PARENT_INTEGER_IDX"/>
+      </field>
       <field name="parentTable">
         <column name="PARENT_TBL_ID"/>
       </field>

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index dc2b7ff..0204fcd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -117,6 +117,7 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
 import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
 import org.apache.hadoop.hive.ql.metadata.CheckResult;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
@@ -124,6 +125,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.PartitionIterable;
+import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
@@ -3092,14 +3094,19 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
           }
         }
       }
-
+      PrimaryKeyInfo pkInfo = null;
+      ForeignKeyInfo fkInfo = null;
+      if (descTbl.isExt() || descTbl.isFormatted()) {
+        pkInfo = db.getPrimaryKeys(tbl.getDbName(), tbl.getTableName());
+        fkInfo = db.getForeignKeys(tbl.getDbName(), tbl.getTableName());
+      }
       fixDecimalColumnTypeName(cols);
       // In case the query is served by HiveServer2, don't pad it with spaces,
       // as HiveServer2 output is consumed by JDBC/ODBC clients.
       boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
       formatter.describeTable(outStream, colPath, tableName, tbl, part,
           cols, descTbl.isFormatted(), descTbl.isExt(),
-          descTbl.isPretty(), isOutputPadded, colStats);
+          descTbl.isPretty(), isOutputPadded, colStats, pkInfo, fkInfo);
 
       LOG.info("DDLTask: written data for " + tbl.getTableName());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
new file mode 100644
index 0000000..a1f9f18
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+
+/**
+ * ForeignKeyInfo is a metadata structure containing the foreign keys associated with a table.
+ * The fields include the child database name, the child table name, mapping of the constraint
+ * name to the foreign key columns associated with the key. The foreign key column structure 
+ * contains the parent database name, parent table name, associated parent column name,
+ * associated child column name and the position of the foreign key column in the key.
+ * The position is one-based index.
+ */
+@SuppressWarnings("serial")
+public class ForeignKeyInfo implements Serializable {
+
+  public class ForeignKeyCol {
+    public String parentTableName;
+    public String parentDatabaseName;
+    public String parentColName;
+    public String childColName;
+    public Integer position;
+
+    public ForeignKeyCol(String parentTableName, String parentDatabaseName, String parentColName,
+      String childColName, Integer position) {
+      this.parentTableName = parentTableName;
+      this.parentDatabaseName = parentDatabaseName;
+      this.parentColName = parentColName;
+      this.childColName = childColName;
+      this.position = position;
+    }
+  }
+
+  // Mapping from constraint name to list of foreign keys
+  Map<String, List<ForeignKeyCol>> foreignKeys;
+  String childTableName;
+  String childDatabaseName;
+
+  public ForeignKeyInfo() {}
+
+  public ForeignKeyInfo(List<SQLForeignKey> fks, String childTableName, String childDatabaseName) {
+    this.childTableName = childTableName;
+    this.childDatabaseName = childDatabaseName;
+    foreignKeys = new HashMap<String, List<ForeignKeyCol>>();
+    if (fks == null) {
+      return;
+    }
+    for (SQLForeignKey fk : fks) {
+      if (fk.getFktable_db().equalsIgnoreCase(childDatabaseName) &&
+          fk.getFktable_name().equalsIgnoreCase(childTableName)) {
+        ForeignKeyCol currCol = new ForeignKeyCol(fk.getPktable_name(), fk.getPktable_db(),
+          fk.getPkcolumn_name(), fk.getFkcolumn_name(), fk.getKey_seq());
+        String constraintName = fk.getFk_name();
+        if (foreignKeys.containsKey(constraintName)) {
+          foreignKeys.get(constraintName).add(currCol);
+        } else {
+          List<ForeignKeyCol> currList = new ArrayList<ForeignKeyCol>();
+          currList.add(currCol);
+          foreignKeys.put(constraintName, currList);
+        }
+      }
+    }
+  }
+
+  public String getChildTableName() {
+    return childTableName;
+  }
+
+  public String getChildDatabaseName() {
+    return childDatabaseName;
+  }
+
+  public Map<String, List<ForeignKeyCol>> getForeignKeys() {
+    return foreignKeys;
+  }
+
+  public void setChildTableName(String tableName) {
+    this.childTableName = tableName;
+  }
+
+  public void setChildDatabaseName(String databaseName) {
+    this.childDatabaseName = databaseName;
+  }
+
+  public void setForeignKeys(Map<String, List<ForeignKeyCol>> foreignKeys) {
+    this.foreignKeys = foreignKeys;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("Foreign Keys for " + childDatabaseName+"."+childTableName+":");
+    sb.append("[");
+    if (foreignKeys != null && foreignKeys.size() > 0) {
+      for (Map.Entry<String, List<ForeignKeyCol>> me : foreignKeys.entrySet()) {
+        sb.append(" {Constraint Name: " + me.getKey() + ",");
+        List<ForeignKeyCol> currCol = me.getValue();
+        if (currCol != null && currCol.size() > 0) {
+          for (ForeignKeyCol fkc : currCol) {
+            sb.append (" (Parent Column Name: " + fkc.parentDatabaseName +
+              "."+ fkc.parentTableName + "." + fkc.parentColName +
+              ", Column Name: " + fkc.childColName + ", Key Sequence: " + fkc.position+ "),");
+          }
+          sb.setLength(sb.length()-1);
+        }
+        sb.append("},");
+      }
+      sb.setLength(sb.length()-1);
+    }
+    sb.append("]");
+    return sb.toString();
+  }
+}
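
A minimal sketch of how this structure is populated and rendered; the SQLForeignKey values below are illustrative and would normally come from the metastore via Hive.getForeignKeys():

    import java.util.Collections;

    import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
    import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;

    public class ForeignKeyInfoSketch {
      public static void main(String[] args) {
        // Single-column foreign key orders.customer_id -> customers.id (names are made up).
        SQLForeignKey fk = new SQLForeignKey();
        fk.setPktable_db("default");
        fk.setPktable_name("customers");
        fk.setPkcolumn_name("id");
        fk.setFktable_db("default");
        fk.setFktable_name("orders");
        fk.setFkcolumn_name("customer_id");
        fk.setKey_seq(1);
        fk.setFk_name("fk_orders_customers");

        ForeignKeyInfo info = new ForeignKeyInfo(Collections.singletonList(fk), "orders", "default");
        System.out.println(info);
        // prints (wrapped here for readability): Foreign Keys for default.orders:[ {Constraint Name:
        //   fk_orders_customers, (Parent Column Name: default.customers.id, Column Name: customer_id, Key Sequence: 1)}]
      }
    }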

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index dd14124..981b961 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -87,6 +87,7 @@ import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.FireEventRequest;
 import org.apache.hadoop.hive.metastore.api.FireEventRequestData;
+import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
 import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
@@ -101,6 +102,7 @@ import org.apache.hadoop.hive.metastore.api.MetadataPpdResult;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
@@ -3620,4 +3622,38 @@ private void constructOneLBLocationMap(FileStatus fSta,
     }
   }
 
+  /**
+   * Get all primary key columns associated with the table.
+   *
+   * @param dbName Database Name
+   * @param tblName Table Name
+   * @return Primary Key associated with the table.
+   * @throws HiveException
+   */
+  public PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName) throws HiveException {
+    try {
+      List<SQLPrimaryKey> primaryKeys = getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName));
+      return new PrimaryKeyInfo(primaryKeys, tblName, dbName);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  /**
+   * Get all foreign keys associated with the table.
+   *
+   * @param dbName Database Name
+   * @param tblName Table Name
+   * @return Foreign keys associated with the table.
+   * @throws HiveException
+   */
+  public ForeignKeyInfo getForeignKeys(String dbName, String tblName) throws HiveException {
+    try {
+      List<SQLForeignKey> foreignKeys = getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName));
+      return new ForeignKeyInfo(foreignKeys, tblName, dbName);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
 };

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
new file mode 100644
index 0000000..77ea687
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.Serializable;
+import java.util.Map;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+
+/**
+ * PrimaryKeyInfo is a metadata structure containing the primary key associated with a table.
+ * The fields include the table name, the database name, the constraint name,
+ * and a mapping from the position of each primary key column to its column name.
+ * Positions are one-based.
+ */
+@SuppressWarnings("serial")
+public class PrimaryKeyInfo implements Serializable {
+
+  Map<Integer, String> colNames;
+  String constraintName;
+  String tableName;
+  String databaseName;
+
+  public PrimaryKeyInfo() {}
+
+  public PrimaryKeyInfo(List<SQLPrimaryKey> pks, String tableName, String databaseName) {
+    this.tableName = tableName;
+    this.databaseName = databaseName;
+    this.colNames = new TreeMap<Integer, String>();
+    if (pks == null) {
+      return;
+    }
+    for (SQLPrimaryKey pk : pks) {
+      if (pk.getTable_db().equalsIgnoreCase(databaseName) &&
+          pk.getTable_name().equalsIgnoreCase(tableName)) {
+        colNames.put(pk.getKey_seq(), pk.getColumn_name());
+        this.constraintName = pk.getPk_name();
+      }
+    }
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public Map<Integer, String> getColNames() {
+    return colNames;
+  }
+
+  public String getConstraintName() {
+    return constraintName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public void setConstraintName(String constraintName) {
+    this.constraintName = constraintName;
+  }
+
+  public void setColNames(Map<Integer, String> colNames) {
+    this.colNames = colNames;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("Primary Key for " + databaseName + "." + tableName + ":");
+    sb.append("[");
+    if (colNames != null && colNames.size() > 0) {
+      for (Map.Entry<Integer, String> me : colNames.entrySet()) {
+        sb.append(me.getValue() + ",");
+      }
+      sb.setLength(sb.length() - 1);
+    }
+    sb.append("], Constraint Name: " + constraintName);
+    return sb.toString();
+  }
+
+}

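As a quick usage sketch of the class above, a PrimaryKeyInfo can also be assembled straight from thrift SQLPrimaryKey rows; every value below is made up, and the setter-based construction simply mirrors the getters the constructor reads.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;

public class PrimaryKeyInfoSketch {
  // Build one thrift row; the db/table/column/constraint names are invented.
  private static SQLPrimaryKey pkRow(String db, String tbl, String col, int seq, String name) {
    SQLPrimaryKey pk = new SQLPrimaryKey();
    pk.setTable_db(db);
    pk.setTable_name(tbl);
    pk.setColumn_name(col);
    pk.setKey_seq(seq);
    pk.setPk_name(name);
    return pk;
  }

  public static void main(String[] args) {
    List<SQLPrimaryKey> pks = new ArrayList<SQLPrimaryKey>();
    pks.add(pkRow("default", "table1", "a", 1, "pk1"));
    pks.add(pkRow("default", "table1", "b", 2, "pk1"));

    PrimaryKeyInfo pkInfo = new PrimaryKeyInfo(pks, "table1", "default");
    // The TreeMap keeps the columns ordered by their one-based key position.
    System.out.println(pkInfo.getColNames()); // {1=a, 2=b}
    System.out.println(pkInfo);               // Primary Key for default.table1:[a,b], Constraint Name: pk1
  }
}
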
http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index 75c2dd9..3315806 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -38,9 +38,11 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.codehaus.jackson.map.ObjectMapper;
 
@@ -102,7 +104,7 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
   public void describeTable(DataOutputStream out, String colPath,
       String tableName, Table tbl, Partition part, List<FieldSchema> cols,
       boolean isFormatted, boolean isExt, boolean isPretty,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats) throws HiveException {
+      boolean isOutputPadded, List<ColumnStatisticsObj> colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) throws HiveException {
     MapBuilder builder = MapBuilder.create();
     builder.put("columns", makeColsUnformatted(cols));
 
@@ -113,6 +115,12 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
       else {
         builder.put("tableInfo", tbl.getTTable());
       }
+      if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
+        builder.put("primaryKeyInfo", pkInfo);
+      }
+      if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
+        builder.put("foreignKeyInfo", fkInfo);
+      }
     }
 
     asJson(out, builder.build());

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index e76fb91..a2ccd56 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -38,8 +38,11 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
 import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
@@ -106,6 +109,7 @@ public final class MetaDataFormatUtils {
    * @param printHeader - if header should be included
    * @param isOutputPadded - make it more human readable by setting indentation
    *        with spaces. Turned off for use by HiveServer2
+   * @param showParColsSep - whether to show the partition column separator
    * @return string with formatted column information
    */
   public static String getAllColumnsInformation(List<FieldSchema> cols,
@@ -269,6 +273,69 @@ public final class MetaDataFormatUtils {
     return indexInfo.toString();
   }
 
+  public static String getConstraintsInformation(PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) {
+    StringBuilder constraintsInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+
+    constraintsInfo.append(LINE_DELIM).append("# Constraints").append(LINE_DELIM);
+    if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
+      constraintsInfo.append(LINE_DELIM).append("# Primary Key").append(LINE_DELIM);
+      getPrimaryKeyInformation(constraintsInfo, pkInfo);
+    }
+    if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
+      constraintsInfo.append(LINE_DELIM).append("# Foreign Keys").append(LINE_DELIM);
+      getForeignKeysInformation(constraintsInfo, fkInfo);
+    }
+    return constraintsInfo.toString();
+  }
+
+  private static void getPrimaryKeyInformation(StringBuilder constraintsInfo,
+    PrimaryKeyInfo pkInfo) {
+    formatOutput("Table:", pkInfo.getDatabaseName() + "." + pkInfo.getTableName(), constraintsInfo);
+    formatOutput("Constraint Name:", pkInfo.getConstraintName(), constraintsInfo);
+    Map<Integer, String> colNames = pkInfo.getColNames();
+    final String columnNames = "Column Names:";
+    constraintsInfo.append(String.format("%-" + ALIGNMENT + "s", columnNames)).append(FIELD_DELIM);
+    if (colNames != null && colNames.size() > 0) {
+      formatOutput(colNames.values().toArray(new String[colNames.size()]), constraintsInfo);
+    }
+  }
+
+  private static void getForeignKeyColInformation(StringBuilder constraintsInfo,
+    ForeignKeyCol fkCol) {
+    String[] fkcFields = new String[3];
+    fkcFields[0] = "Parent Column Name:" + fkCol.parentDatabaseName +
+        "." + fkCol.parentTableName + "." + fkCol.parentColName;
+    fkcFields[1] = "Column Name:" + fkCol.childColName;
+    fkcFields[2] = "Key Sequence:" + fkCol.position;
+    formatOutput(fkcFields, constraintsInfo);
+  }
+
+  private static void getForeignKeyRelInformation(
+    StringBuilder constraintsInfo,
+    String constraintName,
+    List<ForeignKeyCol> fkRel) {
+    formatOutput("Constraint Name:", constraintName, constraintsInfo);
+    if (fkRel != null && fkRel.size() > 0) {
+      for (ForeignKeyCol fkc : fkRel) {
+        getForeignKeyColInformation(constraintsInfo, fkc);
+      }
+    }
+    constraintsInfo.append(LINE_DELIM);
+  }
+
+  private static void getForeignKeysInformation(StringBuilder constraintsInfo,
+    ForeignKeyInfo fkInfo) {
+    formatOutput("Table:",
+                 fkInfo.getChildDatabaseName() + "." + fkInfo.getChildTableName(),
+                 constraintsInfo);
+    Map<String, List<ForeignKeyCol>> foreignKeys = fkInfo.getForeignKeys();
+    if (foreignKeys != null && foreignKeys.size() > 0) {
+      for (Map.Entry<String, List<ForeignKeyCol>> me : foreignKeys.entrySet()) {
+        getForeignKeyRelInformation(constraintsInfo, me.getKey(), me.getValue());
+      }
+    }
+  }
+
   public static String getPartitionInformation(Partition part) {
     StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 

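A hedged end-to-end sketch of the new getConstraintsInformation() entry point; the table name is assumed, and the guard deliberately mirrors the one TextMetaDataFormatter applies further down.

import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;

public class ConstraintsSectionSketch {
  public static void main(String[] args) throws HiveException {
    Hive db = Hive.get();
    PrimaryKeyInfo pkInfo = db.getPrimaryKeys("default", "table1"); // illustrative table
    ForeignKeyInfo fkInfo = db.getForeignKeys("default", "table1");

    // Emit the "# Constraints" block only when at least one constraint exists,
    // matching the text formatter's own check.
    if ((pkInfo != null && !pkInfo.getColNames().isEmpty())
        || (fkInfo != null && !fkInfo.getForeignKeys().isEmpty())) {
      System.out.print(MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo));
    }
  }
}
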
http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index 55e1b3b..71b7ebf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -27,9 +27,11 @@ import java.util.Set;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 /**
@@ -71,12 +73,14 @@ public interface MetaDataFormatter {
    * @param isPretty
    * @param isOutputPadded - if true, add spacing and indentation
    * @param colStats
+   * @param pkInfo  primary key information
+   * @param fkInfo  foreign key information
    * @throws HiveException
    */
   public void describeTable(DataOutputStream out, String colPath,
       String tableName, Table tbl, Partition part, List<FieldSchema> cols,
       boolean isFormatted, boolean isExt, boolean isPretty,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats)
+      boolean isOutputPadded, List<ColumnStatisticsObj> colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo)
           throws HiveException;
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index b5dc0b4..47d67b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -38,9 +38,11 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -117,7 +119,7 @@ class TextMetaDataFormatter implements MetaDataFormatter {
   public void describeTable(DataOutputStream outStream,  String colPath,
       String tableName, Table tbl, Partition part, List<FieldSchema> cols,
       boolean isFormatted, boolean isExt, boolean isPretty,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats) throws HiveException {
+      boolean isOutputPadded, List<ColumnStatisticsObj> colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) throws HiveException {
     try {
       String output;
       if (colPath.equals(tableName)) {
@@ -140,6 +142,12 @@ class TextMetaDataFormatter implements MetaDataFormatter {
             output = MetaDataFormatUtils.getTableInformation(tbl);
           }
           outStream.write(output.getBytes("UTF-8"));
+
+          if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
+              (fkInfo != null && !fkInfo.getForeignKeys().isEmpty())) {
+            output = MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo);
+            outStream.write(output.getBytes("UTF-8"));
+          }
         }
 
         // if extended desc table then show the complete details of the table
@@ -162,6 +170,19 @@ class TextMetaDataFormatter implements MetaDataFormatter {
             outStream.write(separator);
             outStream.write(terminator);
           }
+          if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
+              (fkInfo != null && !fkInfo.getForeignKeys().isEmpty())) {
+            outStream.write("Constraints".getBytes("UTF-8"));
+            outStream.write(separator);
+            if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
+              outStream.write(pkInfo.toString().getBytes("UTF-8"));
+              outStream.write(terminator);
+            }
+            if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
+              outStream.write(fkInfo.toString().getBytes("UTF-8"));
+              outStream.write(terminator);
+            }
+          }
         }
       }
     } catch (IOException e) {

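Finally, a hypothetical caller-side sketch (not the commit's DDL code) of how the widened describeTable() signature gets fed: the constraints are fetched once through Hive and passed straight to whichever formatter is in use.

import java.io.DataOutputStream;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;

public class DescribeCallerSketch {
  static void describe(MetaDataFormatter formatter, DataOutputStream out, Hive db,
      Table tbl, List<FieldSchema> cols, boolean isExt) throws HiveException {
    // Look up constraint metadata once; the formatters only render it when
    // describing the table itself (colPath equal to the table name).
    PrimaryKeyInfo pkInfo = db.getPrimaryKeys(tbl.getDbName(), tbl.getTableName());
    ForeignKeyInfo fkInfo = db.getForeignKeys(tbl.getDbName(), tbl.getTableName());

    formatter.describeTable(out, tbl.getTableName(), tbl.getTableName(), tbl,
        null /* part */, cols, false /* isFormatted */, isExt, false /* isPretty */,
        false /* isOutputPadded */, null /* colStats */, pkInfo, fkInfo);
  }
}
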
http://git-wip-us.apache.org/repos/asf/hive/blob/882a7f00/ql/src/test/queries/clientpositive/create_with_constraints.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_with_constraints.q b/ql/src/test/queries/clientpositive/create_with_constraints.q
index 0bb92e4..7aabf74 100644
--- a/ql/src/test/queries/clientpositive/create_with_constraints.q
+++ b/ql/src/test/queries/clientpositive/create_with_constraints.q
@@ -13,12 +13,52 @@ CREATE TABLE table10 (a STRING, b STRING, constraint pk10 primary key (a) disabl
 CREATE TABLE table11 (a STRING, b STRING, c STRING, constraint pk11 primary key (a) disable novalidate rely, foreign key (a, b) references table9(a, b) disable novalidate,
 foreign key (c) references table4(x) disable novalidate);
 
+DESCRIBE EXTENDED table1;
+DESCRIBE EXTENDED table2;
+DESCRIBE EXTENDED table3;
+DESCRIBE EXTENDED table4;
+DESCRIBE EXTENDED table5;
+DESCRIBE EXTENDED table6;
+DESCRIBE EXTENDED table7;
+DESCRIBE EXTENDED table8;
+DESCRIBE EXTENDED table9;
+DESCRIBE EXTENDED table10;
+DESCRIBE EXTENDED table11;
+
+DESCRIBE FORMATTED table1;
+DESCRIBE FORMATTED table2;
+DESCRIBE FORMATTED table3;
+DESCRIBE FORMATTED table4;
+DESCRIBE FORMATTED table5;
+DESCRIBE FORMATTED table6;
+DESCRIBE FORMATTED table7;
+DESCRIBE FORMATTED table8;
+DESCRIBE FORMATTED table9;
+DESCRIBE FORMATTED table10;
+DESCRIBE FORMATTED table11;
+
+
 ALTER TABLE table2 DROP CONSTRAINT pk1;
 ALTER TABLE table3 DROP CONSTRAINT fk1;
 ALTER TABLE table6 DROP CONSTRAINT fk4;
 
+DESCRIBE EXTENDED table2;
+DESCRIBE EXTENDED table3;
+DESCRIBE EXTENDED table6;
+
+DESCRIBE FORMATTED table2;
+DESCRIBE FORMATTED table3;
+DESCRIBE FORMATTED table6;
+
 CREATE DATABASE dbconstraint;
 USE dbconstraint;
 CREATE TABLE table2 (a STRING, b STRING, constraint pk1 primary key (a) disable novalidate);
 USE default;
+
+DESCRIBE EXTENDED dbconstraint.table2;
+DESCRIBE FORMATTED dbconstraint.table2;
+
 ALTER TABLE dbconstraint.table2 DROP CONSTRAINT pk1;
+
+DESCRIBE EXTENDED dbconstraint.table2;
+DESCRIBE FORMATTED dbconstraint.table2;