Posted to commits@hive.apache.org by jc...@apache.org on 2017/10/19 18:07:09 UTC

[1/2] hive git commit: HIVE-16603: Enforce foreign keys to refer to primary keys or unique keys (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 317922af8 -> abcadab7c


HIVE-16603: Enforce foreign keys to refer to primary keys or unique keys (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c4c7568c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c4c7568c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c4c7568c

Branch: refs/heads/master
Commit: c4c7568c9b6677d34a3075086d360f394a97dea5
Parents: 317922a
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Wed Oct 18 12:35:58 2017 -0700
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Thu Oct 19 11:05:56 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/parse/TestReplicationScenarios.java |  20 +-
 .../alter_table_constraint_invalid_ref.q        |   3 +
 .../clientnegative/create_with_fk_pk_same_tab.q |   1 +
 .../clientnegative/create_with_fk_uk_same_tab.q |   1 +
 .../clientnegative/create_with_fk_wrong_ref.q   |   2 +
 .../clientnegative/create_with_fk_wrong_ref2.q  |   2 +
 .../clientpositive/create_with_constraints.q    |  12 +
 .../alter_table_constraint_invalid_ref.q.out    |  19 ++
 .../create_with_fk_pk_same_tab.q.out            |   5 +
 .../create_with_fk_uk_same_tab.q.out            |   5 +
 .../create_with_fk_wrong_ref.q.out              |  13 +
 .../create_with_fk_wrong_ref2.q.out             |  13 +
 .../create_with_constraints.q.out               | 242 +++++++++++++++
 .../hadoop/hive/metastore/ObjectStore.java      | 291 ++++++++++++++-----
 14 files changed, 542 insertions(+), 87 deletions(-)
----------------------------------------------------------------------
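
After this change, a FOREIGN KEY declaration is accepted only if the columns it references line up, in key-sequence order and by type, with an existing PRIMARY KEY or UNIQUE constraint on the parent table. The check is implemented in ObjectStore.addForeignKeys (see the last diff below): each candidate key is flattened into a "col:type;" signature via generateColNameTypeSignature, the referenced columns of the foreign key are flattened the same way (pairing the parent column names with the referencing columns' types), and the statement is rejected unless that referenced signature matches one of the parent table's key signatures; on the same table, the foreign key additionally may not be the very key it references. The following is a minimal standalone sketch of the signature idea, not part of the commit, replaying the new create_with_fk_wrong_ref2 negative test:

    import java.util.Arrays;
    import java.util.List;

    // Minimal sketch, not the committed code: flatten a key into the "col:type;" form used by
    // ObjectStore.generateColNameTypeSignature and compare signatures.
    public class KeySignatureSketch {

      static String signature(List<String> cols, List<String> types) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < cols.size(); i++) {
          sb.append(cols.get(i)).append(':').append(types.get(i)).append(';');
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        // table2 (a STRING, b INT) declares UNIQUE (a,b) and UNIQUE (b,a):
        String uk1 = signature(Arrays.asList("a", "b"), Arrays.asList("string", "int"));      // a:string;b:int;
        String uk2 = signature(Arrays.asList("b", "a"), Arrays.asList("int", "string"));      // b:int;a:string;
        // table1 (a STRING, b STRING) declares FOREIGN KEY (a,b) REFERENCES table2(a,b);
        // the referenced signature pairs parent column names with the child columns' types:
        String fkRef = signature(Arrays.asList("a", "b"), Arrays.asList("string", "string")); // a:string;b:string;
        System.out.println(fkRef.equals(uk1) || fkRef.equals(uk2)); // false -> the CREATE TABLE is rejected
      }
    }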


http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 276c464..cec8490 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -2933,9 +2933,9 @@ public class TestReplicationScenarios {
 
     run("CREATE DATABASE " + dbName, driver);
 
-    run("CREATE TABLE " + dbName + ".tbl1(a string, b string, primary key (a) disable novalidate rely, unique (b) disable)", driver);
+    run("CREATE TABLE " + dbName + ".tbl1(a string, b string, primary key (a, b) disable novalidate rely)", driver);
     run("CREATE TABLE " + dbName + ".tbl2(a string, b string, foreign key (a, b) references " + dbName + ".tbl1(a, b) disable novalidate)", driver);
-    run("CREATE TABLE " + dbName + ".tbl3(a string, b string not null disable)", driver);
+    run("CREATE TABLE " + dbName + ".tbl3(a string, b string not null disable, unique (a) disable)", driver);
 
     advanceDumpDir();
     run("REPL DUMP " + dbName, driver);
@@ -2946,20 +2946,20 @@ public class TestReplicationScenarios {
 
     try {
       List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(dbName+ "_dupe" , "tbl1"));
-      assertEquals(pks.size(), 1);
-      List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(dbName+ "_dupe" , "tbl1"));
+      assertEquals(pks.size(), 2);
+      List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(dbName+ "_dupe" , "tbl3"));
       assertEquals(uks.size(), 1);
       List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, dbName+ "_dupe" , "tbl2"));
-      assertEquals(fks.size(), 1);
+      assertEquals(fks.size(), 2);
       List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(dbName+ "_dupe" , "tbl3"));
       assertEquals(nns.size(), 1);
     } catch (TException te) {
       assertNull(te);
     }
 
-    run("CREATE TABLE " + dbName + ".tbl4(a string, b string, primary key (a) disable novalidate rely, unique (b) disable)", driver);
+    run("CREATE TABLE " + dbName + ".tbl4(a string, b string, primary key (a, b) disable novalidate rely)", driver);
     run("CREATE TABLE " + dbName + ".tbl5(a string, b string, foreign key (a, b) references " + dbName + ".tbl4(a, b) disable novalidate)", driver);
-    run("CREATE TABLE " + dbName + ".tbl6(a string, b string not null disable)", driver);
+    run("CREATE TABLE " + dbName + ".tbl6(a string, b string not null disable, unique (a) disable)", driver);
 
     advanceDumpDir();
     run("REPL DUMP " + dbName + " FROM " + replDumpId, driver);
@@ -2974,13 +2974,13 @@ public class TestReplicationScenarios {
     String nnName = null;
     try {
       List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(dbName+ "_dupe" , "tbl4"));
-      assertEquals(pks.size(), 1);
+      assertEquals(pks.size(), 2);
       pkName = pks.get(0).getPk_name();
-      List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(dbName+ "_dupe" , "tbl4"));
+      List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(dbName+ "_dupe" , "tbl6"));
       assertEquals(uks.size(), 1);
       ukName = uks.get(0).getUk_name();
       List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, dbName+ "_dupe" , "tbl5"));
-      assertEquals(fks.size(), 1);
+      assertEquals(fks.size(), 2);
       fkName = fks.get(0).getFk_name();
       List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(dbName+ "_dupe" , "tbl6"));
       assertEquals(nns.size(), 1);
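
The updated assertions reflect that the metastore API returns one SQL*Key entry per constraint column: the two-column primary key now declared on tbl1/tbl4 and the two-column foreign keys on tbl2/tbl5 each come back as two entries sharing one constraint name, and the unique constraint has moved to tbl3/tbl6, so the unique-constraint lookups target those tables instead. A hedged read-back sketch, not part of the commit (the client and database names are illustrative):

    import java.util.List;
    import org.apache.hadoop.hive.metastore.IMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
    import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

    // Sketch only: list the replicated composite primary key of tbl1 column by column.
    class CompositeKeyReadback {
      static void printPrimaryKey(IMetaStoreClient client, String db) throws Exception {
        List<SQLPrimaryKey> pks = client.getPrimaryKeys(new PrimaryKeysRequest(db, "tbl1"));
        for (SQLPrimaryKey pk : pks) {
          // Expect two rows for primary key (a, b): same pk name, key sequences 1 and 2.
          System.out.println(pk.getPk_name() + " " + pk.getColumn_name() + " seq=" + pk.getKey_seq());
        }
      }
    }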

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_ref.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_ref.q b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_ref.q
new file mode 100644
index 0000000..1b2b60b
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_constraint_invalid_ref.q
@@ -0,0 +1,3 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable);
+CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable rely);
+alter table table2 add constraint fk1 foreign key (a) references table1(b) disable novalidate;

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientnegative/create_with_fk_pk_same_tab.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_with_fk_pk_same_tab.q b/ql/src/test/queries/clientnegative/create_with_fk_pk_same_tab.q
new file mode 100644
index 0000000..c270db6
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/create_with_fk_pk_same_tab.q
@@ -0,0 +1 @@
+CREATE TABLE table1 (a STRING PRIMARY KEY DISABLE, b STRING, CONSTRAINT fk1 FOREIGN KEY (a) REFERENCES table1(a) DISABLE);

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientnegative/create_with_fk_uk_same_tab.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_with_fk_uk_same_tab.q b/ql/src/test/queries/clientnegative/create_with_fk_uk_same_tab.q
new file mode 100644
index 0000000..51f0213
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/create_with_fk_uk_same_tab.q
@@ -0,0 +1 @@
+CREATE TABLE table1 (a STRING UNIQUE DISABLE, b STRING, CONSTRAINT fk1 FOREIGN KEY (a) REFERENCES table1(a) DISABLE);

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref.q b/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref.q
new file mode 100644
index 0000000..bf75810
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref.q
@@ -0,0 +1,2 @@
+CREATE TABLE table2 (a INT PRIMARY KEY DISABLE, b STRING);
+CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN KEY (b) REFERENCES table2(a) DISABLE);

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref2.q b/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref2.q
new file mode 100644
index 0000000..09ccbe0
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/create_with_fk_wrong_ref2.q
@@ -0,0 +1,2 @@
+CREATE TABLE table2 (a STRING, b INT, CONSTRAINT uk1 UNIQUE (a,b) DISABLE, CONSTRAINT uk2 UNIQUE (b,a) DISABLE);
+CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN KEY (a,b) REFERENCES table2(a,b) DISABLE);

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/queries/clientpositive/create_with_constraints.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_with_constraints.q b/ql/src/test/queries/clientpositive/create_with_constraints.q
index 9700b5c..a17ca9d 100644
--- a/ql/src/test/queries/clientpositive/create_with_constraints.q
+++ b/ql/src/test/queries/clientpositive/create_with_constraints.q
@@ -19,6 +19,10 @@ CREATE TABLE table15 (a STRING REFERENCES table4(x) DISABLE, b STRING);
 CREATE TABLE table16 (a STRING CONSTRAINT nn16_1 REFERENCES table4(x) DISABLE RELY, b STRING);
 CREATE TABLE table17 (a STRING CONSTRAINT uk17_1 UNIQUE DISABLE RELY, b STRING);
 CREATE TABLE table18 (a STRING, b STRING, CONSTRAINT uk18_1 UNIQUE (b) DISABLE RELY);
+CREATE TABLE table19 (a STRING, b STRING, CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE RELY, CONSTRAINT fk19_2 FOREIGN KEY (a) REFERENCES table19(b) DISABLE RELY);
+CREATE TABLE table20 (a STRING, b STRING, CONSTRAINT uk20_1 UNIQUE (b) DISABLE RELY, CONSTRAINT fk20_2 FOREIGN KEY (a) REFERENCES table20(b) DISABLE RELY);
+CREATE TABLE table21 (a STRING, b STRING, CONSTRAINT uk21_1 UNIQUE (a,b) DISABLE);
+CREATE TABLE table22 (a STRING, b STRING, CONSTRAINT fk22_1 FOREIGN KEY (a,b) REFERENCES table21(a,b) DISABLE);
 
 DESCRIBE EXTENDED table1;
 DESCRIBE EXTENDED table2;
@@ -38,6 +42,10 @@ DESCRIBE EXTENDED table15;
 DESCRIBE EXTENDED table16;
 DESCRIBE EXTENDED table17;
 DESCRIBE EXTENDED table18;
+DESCRIBE EXTENDED table19;
+DESCRIBE EXTENDED table20;
+DESCRIBE EXTENDED table21;
+DESCRIBE EXTENDED table22;
 
 DESCRIBE FORMATTED table1;
 DESCRIBE FORMATTED table2;
@@ -57,6 +65,10 @@ DESCRIBE FORMATTED table15;
 DESCRIBE FORMATTED table16;
 DESCRIBE FORMATTED table17;
 DESCRIBE FORMATTED table18;
+DESCRIBE FORMATTED table19;
+DESCRIBE FORMATTED table20;
+DESCRIBE FORMATTED table21;
+DESCRIBE FORMATTED table22;
 
 ALTER TABLE table2 DROP CONSTRAINT pk1;
 ALTER TABLE table3 DROP CONSTRAINT fk1;

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out b/ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out
new file mode 100644
index 0000000..9e98454
--- /dev/null
+++ b/ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out
@@ -0,0 +1,19 @@
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: CREATE TABLE table1 (a STRING, b STRING, primary key (a) disable)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable rely)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2 (a STRING, b STRING, primary key (a) disable rely)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: alter table table2 add constraint fk1 foreign key (a) references table1(b) disable novalidate
+PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Foreign key references b:string; but no corresponding primary key or unique key exists. Possible keys: [a:string;])

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out b/ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
new file mode 100644
index 0000000..fae2769
--- /dev/null
+++ b/ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
@@ -0,0 +1,5 @@
+PREHOOK: query: CREATE TABLE table1 (a STRING PRIMARY KEY DISABLE, b STRING, CONSTRAINT fk1 FOREIGN KEY (a) REFERENCES table1(a) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out b/ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
new file mode 100644
index 0000000..1644d5a
--- /dev/null
+++ b/ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
@@ -0,0 +1,5 @@
+PREHOOK: query: CREATE TABLE table1 (a STRING UNIQUE DISABLE, b STRING, CONSTRAINT fk1 FOREIGN KEY (a) REFERENCES table1(a) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out b/ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
new file mode 100644
index 0000000..ce0f947
--- /dev/null
+++ b/ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
@@ -0,0 +1,13 @@
+PREHOOK: query: CREATE TABLE table2 (a INT PRIMARY KEY DISABLE, b STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2 (a INT PRIMARY KEY DISABLE, b STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN KEY (b) REFERENCES table2(a) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Foreign key references a:string; but no corresponding primary key or unique key exists. Possible keys: [a:int;])

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out b/ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
new file mode 100644
index 0000000..998c643
--- /dev/null
+++ b/ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
@@ -0,0 +1,13 @@
+PREHOOK: query: CREATE TABLE table2 (a STRING, b INT, CONSTRAINT uk1 UNIQUE (a,b) DISABLE, CONSTRAINT uk2 UNIQUE (b,a) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2 (a STRING, b INT, CONSTRAINT uk1 UNIQUE (a,b) DISABLE, CONSTRAINT uk2 UNIQUE (b,a) DISABLE)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN KEY (a,b) REFERENCES table2(a,b) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Foreign key references a:string;b:string; but no corresponding primary key or unique key exists. Possible keys: [b:int;a:string;, a:string;b:int;])

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/ql/src/test/results/clientpositive/create_with_constraints.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_with_constraints.q.out b/ql/src/test/results/clientpositive/create_with_constraints.q.out
index a675a0e..2f3daef 100644
--- a/ql/src/test/results/clientpositive/create_with_constraints.q.out
+++ b/ql/src/test/results/clientpositive/create_with_constraints.q.out
@@ -148,6 +148,38 @@ POSTHOOK: query: CREATE TABLE table18 (a STRING, b STRING, CONSTRAINT uk18_1 UNI
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table18
+PREHOOK: query: CREATE TABLE table19 (a STRING, b STRING, CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE RELY, CONSTRAINT fk19_2 FOREIGN KEY (a) REFERENCES table19(b) DISABLE RELY)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table19
+POSTHOOK: query: CREATE TABLE table19 (a STRING, b STRING, CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE RELY, CONSTRAINT fk19_2 FOREIGN KEY (a) REFERENCES table19(b) DISABLE RELY)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table19
+PREHOOK: query: CREATE TABLE table20 (a STRING, b STRING, CONSTRAINT uk20_1 UNIQUE (b) DISABLE RELY, CONSTRAINT fk20_2 FOREIGN KEY (a) REFERENCES table20(b) DISABLE RELY)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table20
+POSTHOOK: query: CREATE TABLE table20 (a STRING, b STRING, CONSTRAINT uk20_1 UNIQUE (b) DISABLE RELY, CONSTRAINT fk20_2 FOREIGN KEY (a) REFERENCES table20(b) DISABLE RELY)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table20
+PREHOOK: query: CREATE TABLE table21 (a STRING, b STRING, CONSTRAINT uk21_1 UNIQUE (a,b) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table21
+POSTHOOK: query: CREATE TABLE table21 (a STRING, b STRING, CONSTRAINT uk21_1 UNIQUE (a,b) DISABLE)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table21
+PREHOOK: query: CREATE TABLE table22 (a STRING, b STRING, CONSTRAINT fk22_1 FOREIGN KEY (a,b) REFERENCES table21(a,b) DISABLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table22
+POSTHOOK: query: CREATE TABLE table22 (a STRING, b STRING, CONSTRAINT fk22_1 FOREIGN KEY (a,b) REFERENCES table21(a,b) DISABLE)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table22
 PREHOOK: query: DESCRIBE EXTENDED table1
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table1
@@ -351,6 +383,50 @@ b                   	string
 	 	 
 #### A masked pattern was here ####
 Constraints	Unique Constraints for default.table18:[ {Constraint Name: uk18_1, (Column Name: b, Key Sequence: 1)}]	 
+PREHOOK: query: DESCRIBE EXTENDED table19
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table19
+POSTHOOK: query: DESCRIBE EXTENDED table19
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table19
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
+Constraints	Primary Key for default.table19:[b], Constraint Name: pk19_1	 
+Foreign Keys for default.table19:[ {Constraint Name: fk19_2, (Parent Column Name: default.table19.b, Column Name: a, Key Sequence: 1)}]	 	 
+PREHOOK: query: DESCRIBE EXTENDED table20
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table20
+POSTHOOK: query: DESCRIBE EXTENDED table20
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table20
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
+Constraints	Unique Constraints for default.table20:[ {Constraint Name: uk20_1, (Column Name: b, Key Sequence: 1)}]	 
+PREHOOK: query: DESCRIBE EXTENDED table21
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table21
+POSTHOOK: query: DESCRIBE EXTENDED table21
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table21
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
+Constraints	Unique Constraints for default.table21:[ {Constraint Name: uk21_1, (Column Name: a, Key Sequence: 1), (Column Name: b, Key Sequence: 2)}]	 
+PREHOOK: query: DESCRIBE EXTENDED table22
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table22
+POSTHOOK: query: DESCRIBE EXTENDED table22
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table22
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
 PREHOOK: query: DESCRIBE FORMATTED table1
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@table1
@@ -1139,6 +1215,172 @@ Table:              	default.table18
 Constraint Name:    	uk18_1              	 
 Column Name:b       	Key Sequence:1      	 
 	 	 
+PREHOOK: query: DESCRIBE FORMATTED table19
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table19
+POSTHOOK: query: DESCRIBE FORMATTED table19
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table19
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	 	 
+# Constraints	 	 
+	 	 
+# Primary Key	 	 
+Table:              	default.table19     	 
+Constraint Name:    	pk19_1              	 
+Column Names:       	b                   	 
+	 	 
+# Foreign Keys	 	 
+Table:              	default.table19     	 
+Constraint Name:    	fk19_2              	 
+Parent Column Name:default.table19.b	Column Name:a       	Key Sequence:1      
+	 	 
+PREHOOK: query: DESCRIBE FORMATTED table20
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table20
+POSTHOOK: query: DESCRIBE FORMATTED table20
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table20
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	 	 
+# Constraints	 	 
+	 	 
+# Unique Constraints	 	 
+Table:              	default.table20     	 
+Constraint Name:    	uk20_1              	 
+Column Name:b       	Key Sequence:1      	 
+	 	 
+PREHOOK: query: DESCRIBE FORMATTED table21
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table21
+POSTHOOK: query: DESCRIBE FORMATTED table21
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table21
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	 	 
+# Constraints	 	 
+	 	 
+# Unique Constraints	 	 
+Table:              	default.table21     	 
+Constraint Name:    	uk21_1              	 
+Column Name:a       	Key Sequence:1      	 
+Column Name:b       	Key Sequence:2      	 
+	 	 
+PREHOOK: query: DESCRIBE FORMATTED table22
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table22
+POSTHOOK: query: DESCRIBE FORMATTED table22
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table22
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: ALTER TABLE table2 DROP CONSTRAINT pk1
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
 POSTHOOK: query: ALTER TABLE table2 DROP CONSTRAINT pk1

http://git-wip-us.apache.org/repos/asf/hive/blob/c4c7568c/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 625830e..6b70c59 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -34,6 +34,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -185,8 +186,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
+import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.SortedSetMultimap;
+import com.google.common.collect.TreeMultimap;
 
 
 /**
@@ -1084,8 +1091,8 @@ public class ObjectStore implements RawStore, Configurable {
       // Add constraints.
       // We need not do a deep retrieval of the Table Column Descriptor while persisting the
       // constraints since this transaction involving create table is not yet committed.
-      List<String> constraintNames = addPrimaryKeys(primaryKeys, false);
-      constraintNames.addAll(addForeignKeys(foreignKeys, false));
+      List<String> constraintNames = addForeignKeys(foreignKeys, false, primaryKeys, uniqueConstraints);
+      constraintNames.addAll(addPrimaryKeys(primaryKeys, false));
       constraintNames.addAll(addUniqueConstraints(uniqueConstraints, false));
       constraintNames.addAll(addNotNullConstraints(notNullConstraints, false));
       success = commitTransaction();
@@ -3808,7 +3815,20 @@ public class ObjectStore implements RawStore, Configurable {
     return sds;
   }
 
-  private int getColumnIndexFromTableColumns(List<MFieldSchema> cols, String col) {
+  private static MFieldSchema getColumnFromTableColumns(List<MFieldSchema> cols, String col) {
+    if (cols == null) {
+      return null;
+    }
+    for (int i = 0; i < cols.size(); i++) {
+      MFieldSchema mfs = cols.get(i);
+      if (mfs.getName().equalsIgnoreCase(col)) {
+        return mfs;
+      }
+    }
+    return null;
+  }
+
+  private static int getColumnIndexFromTableColumns(List<MFieldSchema> cols, String col) {
     if (cols == null) {
       return -1;
     }
@@ -3857,7 +3877,7 @@ public class ObjectStore implements RawStore, Configurable {
   @Override
   public List<String> addForeignKeys(
     List<SQLForeignKey> fks) throws InvalidObjectException, MetaException {
-   return addForeignKeys(fks, true);
+   return addForeignKeys(fks, true, null, null);
   }
 
   @Override
@@ -3931,89 +3951,206 @@ public class ObjectStore implements RawStore, Configurable {
     return null;
   }
 
-  private List<String> addForeignKeys(
-    List<SQLForeignKey> fks, boolean retrieveCD) throws InvalidObjectException,
-    MetaException {
+  private List<String> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD,
+      List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints)
+          throws InvalidObjectException, MetaException {
     List<String> fkNames = new ArrayList<>();
-    List<MConstraint> mpkfks = new ArrayList<>();
-    String currentConstraintName = null;
 
-    for (int i = 0; i < fks.size(); i++) {
-      final String pkTableDB = normalizeIdentifier(fks.get(i).getPktable_db());
-      final String pkTableName = normalizeIdentifier(fks.get(i).getPktable_name());
-      final String pkColumnName =normalizeIdentifier(fks.get(i).getPkcolumn_name());
-      final String fkTableDB = normalizeIdentifier(fks.get(i).getFktable_db());
-      final String fkTableName = normalizeIdentifier(fks.get(i).getFktable_name());
-      final String fkColumnName = normalizeIdentifier(fks.get(i).getFkcolumn_name());
+    if (foreignKeys.size() > 0) {
+      List<MConstraint> mpkfks = new ArrayList<>();
+      String currentConstraintName = null;
+      // We start iterating through the foreign keys. This list might contain more than a single
+      // foreign key, and each foreign key might contain multiple columns. The outer loop retrieves
+      // the information that is common for a single key (table information) while the inner loop
+      // checks / adds information about each column.
+      for (int i = 0; i < foreignKeys.size(); i++) {
+        final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
+        final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
+        // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
+        // For instance, this is the case when we are creating the table.
+        final AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
+        final MTable childTable = nChildTable.mtbl;
+        if (childTable == null) {
+          throw new InvalidObjectException("Child table not found: " + fkTableName);
+        }
+        final MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
+        final List<MFieldSchema> childCols = childCD.getCols();
+
+        final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
+        final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
+        // For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
+        // it is an alter table statement) or if it is a create table statement but we are
+        // referencing another table instead of self for the primary key.
+        final AttachedMTableInfo nParentTable;
+        final MTable parentTable;
+        final MColumnDescriptor parentCD;
+        final List<MFieldSchema> parentCols;
+        final List<SQLPrimaryKey> existingTablePrimaryKeys;
+        final List<SQLUniqueConstraint> existingTableUniqueConstraints;
+        final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
+        if (sameTable) {
+          nParentTable = nChildTable;
+          parentTable = childTable;
+          parentCD = childCD;
+          parentCols = childCols;
+          existingTablePrimaryKeys = primaryKeys;
+          existingTableUniqueConstraints = uniqueConstraints;
+        } else {
+          nParentTable = getMTable(pkTableDB, pkTableName, true);
+          parentTable = nParentTable.mtbl;
+          if (parentTable == null) {
+            throw new InvalidObjectException("Parent table not found: " + pkTableName);
+          }
+          parentCD = nParentTable.mcd;
+          parentCols = parentCD == null ? null : parentCD.getCols();
+          existingTablePrimaryKeys = getPrimaryKeys(pkTableDB, pkTableName);
+          existingTableUniqueConstraints = getUniqueConstraints(pkTableDB, pkTableName);
+        }
 
-      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
-      // For instance, this is the case when we are creating the table.
-      AttachedMTableInfo nParentTable = getMTable(pkTableDB, pkTableName, retrieveCD);
-      MTable parentTable = nParentTable.mtbl;
-      if (parentTable == null) {
-        throw new InvalidObjectException("Parent table not found: " + pkTableName);
-      }
+        // Here we build an aux structure that is used to verify that the foreign key that is declared
+        // is actually referencing a valid primary key or unique key. We also check that the types of
+        // the columns correspond.
+        if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
+          throw new MetaException(
+              "Trying to define foreign key but there are no primary keys or unique keys for referenced table");
+        }
+        final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols,
+            existingTablePrimaryKeys, existingTableUniqueConstraints);
+
+        StringBuilder fkSignature = new StringBuilder();
+        StringBuilder referencedKSignature = new StringBuilder();
+        for (; i < foreignKeys.size(); i++) {
+          final SQLForeignKey foreignKey = foreignKeys.get(i);
+          final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
+          int childIntegerIndex = getColumnIndexFromTableColumns(childCols, fkColumnName);
+          if (childIntegerIndex == -1) {
+            throw new InvalidObjectException("Child column not found: " + fkColumnName);
+          }
 
-      AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
-      MTable childTable = nChildTable.mtbl;
-      if (childTable == null) {
-        throw new InvalidObjectException("Child table not found: " + fkTableName);
-      }
+          final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
+          int parentIntegerIndex = getColumnIndexFromTableColumns(parentCols, pkColumnName);
+          if (parentIntegerIndex == -1) {
+            throw new InvalidObjectException("Parent column not found: " + pkColumnName);
+          }
 
-      MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
-      List<MFieldSchema> parentCols = parentCD == null ? null : parentCD.getCols();
-      int parentIntegerIndex = getColumnIndexFromTableColumns(parentCols, pkColumnName);
-      if (parentIntegerIndex == -1) {
-        throw new InvalidObjectException("Parent column not found: " + pkColumnName);
-      }
-
-      MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
-      List<MFieldSchema> childCols = childCD.getCols();
-      int childIntegerIndex = getColumnIndexFromTableColumns(childCols, fkColumnName);
-      if (childIntegerIndex == -1) {
-        throw new InvalidObjectException("Child column not found: " + fkColumnName);
-      }
-
-      if (fks.get(i).getFk_name() == null) {
-        // When there is no explicit foreign key name associated with the constraint and the key is composite,
-        // we expect the foreign keys to be send in order in the input list.
-        // Otherwise, the below code will break.
-        // If this is the first column of the FK constraint, generate the foreign key name
-        // NB: The below code can result in race condition where duplicate names can be generated (in theory).
-        // However, this scenario can be ignored for practical purposes because of
-        // the uniqueness of the generated constraint name.
-        if (fks.get(i).getKey_seq() == 1) {
-          currentConstraintName = generateConstraintName(
-            fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
+          if (foreignKey.getFk_name() == null) {
+            // When there is no explicit foreign key name associated with the constraint and the key is composite,
+            // we expect the foreign keys to be send in order in the input list.
+            // we expect the foreign keys to be sent in order in the input list.
+            // If this is the first column of the FK constraint, generate the foreign key name
+            // NB: The below code can result in race condition where duplicate names can be generated (in theory).
+            // NB: The below code can result in a race condition where duplicate names can be generated (in theory).
+            // the uniqueness of the generated constraint name.
+            if (foreignKey.getKey_seq() == 1) {
+              currentConstraintName = generateConstraintName(
+                fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
+            }
+          } else {
+            currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
+          }
+          fkNames.add(currentConstraintName);
+          Integer updateRule = foreignKey.getUpdate_rule();
+          Integer deleteRule = foreignKey.getDelete_rule();
+          int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0) +
+                  (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
+          MConstraint mpkfk = new MConstraint(
+            currentConstraintName,
+            MConstraint.FOREIGN_KEY_CONSTRAINT,
+            foreignKey.getKey_seq(),
+            deleteRule,
+            updateRule,
+            enableValidateRely,
+            parentTable,
+            childTable,
+            parentCD,
+            childCD,
+            childIntegerIndex,
+            parentIntegerIndex
+          );
+          mpkfks.add(mpkfk);
+
+          final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
+          fkSignature.append(
+              generateColNameTypeSignature(fkColumnName, fkColType));
+          referencedKSignature.append(
+              generateColNameTypeSignature(pkColumnName, fkColType));
+
+          if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
+            // Next one is a new key, we bail out from the inner loop
+            break;
+          }
         }
-      } else {
-        currentConstraintName = normalizeIdentifier(fks.get(i).getFk_name());
-      }
-      fkNames.add(currentConstraintName);
-      Integer updateRule = fks.get(i).getUpdate_rule();
-      Integer deleteRule = fks.get(i).getDelete_rule();
-      int enableValidateRely = (fks.get(i).isEnable_cstr() ? 4 : 0) +
-      (fks.get(i).isValidate_cstr() ? 2 : 0) + (fks.get(i).isRely_cstr() ? 1 : 0);
-      MConstraint mpkfk = new MConstraint(
-        currentConstraintName,
-        MConstraint.FOREIGN_KEY_CONSTRAINT,
-        fks.get(i).getKey_seq(),
-        deleteRule,
-        updateRule,
-        enableValidateRely,
-        parentTable,
-        childTable,
-        parentCD,
-        childCD,
-        childIntegerIndex,
-        parentIntegerIndex
-      );
-      mpkfks.add(mpkfk);
+        String referenced = referencedKSignature.toString();
+        if (!validPKsOrUnique.contains(referenced)) {
+          throw new MetaException(
+              "Foreign key references " + referenced + " but no corresponding "
+              + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
+        }
+        if (sameTable && fkSignature.toString().equals(referenced)) {
+          throw new MetaException(
+              "Cannot be both foreign key and primary/unique key on same table: " + referenced);
+        }
+        fkSignature = new StringBuilder();
+        referencedKSignature = new StringBuilder();
+      }
+      pm.makePersistentAll(mpkfks);
     }
-    pm.makePersistentAll(mpkfks);
     return fkNames;
   }
 
+  private static Set<String> generateValidPKsOrUniqueSignatures(List<MFieldSchema> tableCols,
+      List<SQLPrimaryKey> refTablePrimaryKeys, List<SQLUniqueConstraint> refTableUniqueConstraints) {
+    final Set<String> validPKsOrUnique = new HashSet<>();
+    if (!refTablePrimaryKeys.isEmpty()) {
+      Collections.sort(refTablePrimaryKeys, new Comparator<SQLPrimaryKey>() {
+        @Override
+        public int compare(SQLPrimaryKey o1, SQLPrimaryKey o2) {
+          int keyNameComp = o1.getPk_name().compareTo(o2.getPk_name());
+          if (keyNameComp == 0) { return Integer.compare(o1.getKey_seq(), o2.getKey_seq()); }
+          return keyNameComp;
+        }
+      });
+      StringBuilder pkSignature = new StringBuilder();
+      for (SQLPrimaryKey pk : refTablePrimaryKeys) {
+        pkSignature.append(
+            generateColNameTypeSignature(
+                pk.getColumn_name(), getColumnFromTableColumns(tableCols, pk.getColumn_name()).getType()));
+      }
+      validPKsOrUnique.add(pkSignature.toString());
+    }
+    if (!refTableUniqueConstraints.isEmpty()) {
+      Collections.sort(refTableUniqueConstraints, new Comparator<SQLUniqueConstraint>() {
+        @Override
+        public int compare(SQLUniqueConstraint o1, SQLUniqueConstraint o2) {
+          int keyNameComp = o1.getUk_name().compareTo(o2.getUk_name());
+          if (keyNameComp == 0) { return Integer.compare(o1.getKey_seq(), o2.getKey_seq()); }
+          return keyNameComp;
+        }
+      });
+      StringBuilder ukSignature = new StringBuilder();
+      for (int j = 0; j < refTableUniqueConstraints.size(); j++) {
+        SQLUniqueConstraint uk = refTableUniqueConstraints.get(j);
+        ukSignature.append(
+            generateColNameTypeSignature(
+                uk.getColumn_name(), getColumnFromTableColumns(tableCols, uk.getColumn_name()).getType()));
+        if (j + 1 < refTableUniqueConstraints.size()) {
+          if (!refTableUniqueConstraints.get(j + 1).getUk_name().equals(
+                  refTableUniqueConstraints.get(j).getUk_name())) {
+            validPKsOrUnique.add(ukSignature.toString());
+            ukSignature = new StringBuilder();
+          }
+        } else {
+          validPKsOrUnique.add(ukSignature.toString());
+        }
+      }
+    }
+    return validPKsOrUnique;
+  }
+
+  private static String generateColNameTypeSignature(String colName, String colType) {
+    return colName + ":" + colType + ";";
+  }
+
   @Override
   public List<String> addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException,
     MetaException {
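
Two structural points in the ObjectStore change above are worth calling out. First, the create-table path now registers the foreign keys before the primary keys, passing along the primary keys and unique constraints declared in the same statement, so a self-referencing foreign key can be validated against keys that are not persisted yet; the public addForeignKeys(fks) entry point (used by ALTER TABLE ADD CONSTRAINT) passes nulls instead, and when the parent is a different table its existing keys are fetched through getPrimaryKeys and getUniqueConstraints. Second, the rewritten loop groups the flat per-column SQLForeignKey rows into whole constraints, starting a new group whenever the next row's key sequence is 1 again, so the full composite signature can be validated at once. A minimal grouping sketch, not the committed code:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Sketch only: group flat per-column foreign-key rows into whole constraints the way the
    // rewritten inner loop does, i.e. a new group starts whenever the next key sequence is 1.
    class FkGroupingSketch {
      static class Row {
        final int keySeq;
        final String column;
        Row(int keySeq, String column) { this.keySeq = keySeq; this.column = column; }
      }

      static List<List<Row>> group(List<Row> rows) {
        List<List<Row>> groups = new ArrayList<>();
        for (int i = 0; i < rows.size(); i++) {
          List<Row> current = new ArrayList<>();
          for (; i < rows.size(); i++) {
            current.add(rows.get(i));
            if (i + 1 < rows.size() && rows.get(i + 1).keySeq == 1) {
              break; // next row opens a new foreign key
            }
          }
          groups.add(current);
        }
        return groups;
      }

      public static void main(String[] args) {
        List<Row> rows = Arrays.asList(
            new Row(1, "a"), new Row(2, "b"), // composite foreign key (a, b)
            new Row(1, "c"));                 // single-column foreign key (c)
        System.out.println(group(rows).size()); // 2
      }
    }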


[2/2] hive git commit: HIVE-16604: Use [NOT] ENFORCED for column constraint characteristics (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
HIVE-16604: Use [NOT] ENFORCED for column constraint characteristics (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/abcadab7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/abcadab7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/abcadab7

Branch: refs/heads/master
Commit: abcadab7c219c1458bdcab7a9379a928e3ea9305
Parents: c4c7568
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Wed Oct 18 14:49:34 2017 -0700
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Thu Oct 19 11:06:27 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |   4 +-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |   8 ++
 .../hadoop/hive/ql/parse/IdentifiersParser.g    |   2 +-
 .../create_with_constraints_enforced.q          |   1 +
 .../clientpositive/create_with_constraints2.q   |   8 ++
 .../create_with_constraints_enable.q.out        |   2 +-
 .../create_with_constraints_enforced.q.out      |   1 +
 .../create_with_constraints2.q.out              | 120 +++++++++++++++++++
 9 files changed, 143 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 50a55d8..3efeecd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -793,8 +793,8 @@ public abstract class BaseSemanticAnalyzer {
     }
     if (enable) {
       throw new SemanticException(
-          ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("ENABLE feature not supported yet. "
-              + "Please use DISABLE instead."));
+          ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("ENABLE/ENFORCED feature not supported yet. "
+              + "Please use DISABLE/NOT ENFORCED instead."));
     }
     if (validate) {
       throw new SemanticException(

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index d0ce4ae..74f9d95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -332,6 +332,7 @@ KW_PRIMARY: 'PRIMARY';
 KW_FOREIGN: 'FOREIGN';
 KW_REFERENCES: 'REFERENCES';
 KW_CONSTRAINT: 'CONSTRAINT';
+KW_ENFORCED: 'ENFORCED';
 KW_VALIDATE: 'VALIDATE';
 KW_NOVALIDATE: 'NOVALIDATE';
 KW_RELY: 'RELY';

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index c2d8904..a2e55ed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -2190,6 +2190,7 @@ enableValidateSpecification
 @init { pushMsg("enable specification", state); }
 @after { popMsg(state); }
     : enableSpecification validateSpecification?
+    | enforcedSpecification
     ;
 
 enableSpecification
@@ -2206,6 +2207,13 @@ validateSpecification
     | KW_NOVALIDATE -> ^(TOK_NOVALIDATE)
     ;
 
+enforcedSpecification
+@init { pushMsg("enforced specification", state); }
+@after { popMsg(state); }
+    : KW_ENFORCED -> ^(TOK_ENABLE)
+    | KW_NOT KW_ENFORCED -> ^(TOK_DISABLE)
+    ;
+
 relySpecification
 @init { pushMsg("rely specification", state); }
 @after { popMsg(state); }
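
The new enforcedSpecification rule above maps the SQL-standard spelling onto the tokens Hive already handles: ENFORCED is rewritten to TOK_ENABLE, and is therefore rejected by BaseSemanticAnalyzer with the updated "not supported yet" message, while NOT ENFORCED is rewritten to TOK_DISABLE and behaves exactly like DISABLE, as the new create_with_constraints2 test below exercises. A trivial standalone sketch of that mapping, not Hive code:

    // Sketch only: the token mapping introduced by enforcedSpecification.
    class EnforcedSpellingSketch {
      static String toLegacyToken(String spelling) {
        switch (spelling.toUpperCase()) {
          case "ENFORCED":     return "TOK_ENABLE";  // later rejected: ENABLE is not supported yet
          case "NOT ENFORCED": return "TOK_DISABLE"; // accepted, same handling as DISABLE
          default: throw new IllegalArgumentException("unexpected spelling: " + spelling);
        }
      }

      public static void main(String[] args) {
        System.out.println(toLegacyToken("NOT ENFORCED")); // TOK_DISABLE
        System.out.println(toLegacyToken("ENFORCED"));     // TOK_ENABLE
      }
    }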

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 0a7b7b2..69a1d63 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -793,7 +793,7 @@ nonReserved
     | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_DATA | KW_DAY
     | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY 
     | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | KW_DOW | KW_ELEM_TYPE 
-    | KW_ENABLE | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
+    | KW_ENABLE | KW_ENFORCED | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
     | KW_FIRST | KW_FORMAT | KW_FORMATTED | KW_FUNCTIONS | KW_HOLD_DDLTIME | KW_HOUR | KW_IDXPROPERTIES | KW_IGNORE
     | KW_INDEX | KW_INDEXES | KW_INPATH | KW_INPUTDRIVER | KW_INPUTFORMAT | KW_ITEMS | KW_JAR | KW_KILL
     | KW_KEYS | KW_KEY_TYPE | KW_LAST | KW_LIMIT | KW_OFFSET | KW_LINES | KW_LOAD | KW_LOCATION | KW_LOCK | KW_LOCKS | KW_LOGICAL | KW_LONG

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/test/queries/clientnegative/create_with_constraints_enforced.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_with_constraints_enforced.q b/ql/src/test/queries/clientnegative/create_with_constraints_enforced.q
new file mode 100644
index 0000000..5b90c0e
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/create_with_constraints_enforced.q
@@ -0,0 +1 @@
+CREATE TABLE table1 (a STRING, b STRING, primary key (a) enforced);

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/test/queries/clientpositive/create_with_constraints2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_with_constraints2.q b/ql/src/test/queries/clientpositive/create_with_constraints2.q
new file mode 100644
index 0000000..22ea23d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/create_with_constraints2.q
@@ -0,0 +1,8 @@
+CREATE TABLE table1 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED);
+CREATE TABLE table2 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED RELY);
+
+DESCRIBE EXTENDED table1;
+DESCRIBE EXTENDED table2;
+
+DESCRIBE FORMATTED table1;
+DESCRIBE FORMATTED table2;

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/test/results/clientnegative/create_with_constraints_enable.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_constraints_enable.q.out b/ql/src/test/results/clientnegative/create_with_constraints_enable.q.out
index 65d3bfe..2b24412 100644
--- a/ql/src/test/results/clientnegative/create_with_constraints_enable.q.out
+++ b/ql/src/test/results/clientnegative/create_with_constraints_enable.q.out
@@ -1 +1 @@
-FAILED: SemanticException [Error 10326]: Invalid Constraint syntax ENABLE feature not supported yet. Please use DISABLE instead.
+FAILED: SemanticException [Error 10326]: Invalid Constraint syntax ENABLE/ENFORCED feature not supported yet. Please use DISABLE/NOT ENFORCED instead.

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/test/results/clientnegative/create_with_constraints_enforced.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_with_constraints_enforced.q.out b/ql/src/test/results/clientnegative/create_with_constraints_enforced.q.out
new file mode 100644
index 0000000..2b24412
--- /dev/null
+++ b/ql/src/test/results/clientnegative/create_with_constraints_enforced.q.out
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10326]: Invalid Constraint syntax ENABLE/ENFORCED feature not supported yet. Please use DISABLE/NOT ENFORCED instead.

http://git-wip-us.apache.org/repos/asf/hive/blob/abcadab7/ql/src/test/results/clientpositive/create_with_constraints2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_with_constraints2.q.out b/ql/src/test/results/clientpositive/create_with_constraints2.q.out
new file mode 100644
index 0000000..eea74d9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/create_with_constraints2.q.out
@@ -0,0 +1,120 @@
+PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: CREATE TABLE table1 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: CREATE TABLE table2 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED RELY)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2 (a STRING, b STRING, PRIMARY KEY (a) NOT ENFORCED RELY)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: DESCRIBE EXTENDED table1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table1
+POSTHOOK: query: DESCRIBE EXTENDED table1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table1
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
+Constraints	Primary Key for default.table1:[a], Constraint Name: #### A masked pattern was here ####	 
+PREHOOK: query: DESCRIBE EXTENDED table2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table2
+POSTHOOK: query: DESCRIBE EXTENDED table2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table2
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+#### A masked pattern was here ####
+Constraints	Primary Key for default.table2:[a], Constraint Name: #### A masked pattern was here ####	 
+PREHOOK: query: DESCRIBE FORMATTED table1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table1
+POSTHOOK: query: DESCRIBE FORMATTED table1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table1
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	 	 
+# Constraints	 	 
+	 	 
+# Primary Key	 	 
+Table:              	default.table1      	 
+Constraint Name:    	#### A masked pattern was here ####	 
+Column Names:       	a                   	 
+PREHOOK: query: DESCRIBE FORMATTED table2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table2
+POSTHOOK: query: DESCRIBE FORMATTED table2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table2
+# col_name            	data_type           	comment             
+a                   	string              	                    
+b                   	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
+	numFiles            	0                   
+	numRows             	0                   
+	rawDataSize         	0                   
+	totalSize           	0                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	 	 
+# Constraints	 	 
+	 	 
+# Primary Key	 	 
+Table:              	default.table2      	 
+Constraint Name:    	#### A masked pattern was here ####	 
+Column Names:       	a