Posted to commits@hive.apache.org by th...@apache.org on 2017/05/09 06:27:52 UTC

hive git commit: HIVE-13652 : Import table change order of dynamic partitions (Sankar Hariappan via Thejas Nair)

Repository: hive
Updated Branches:
  refs/heads/master d6db6ffff -> 6bd3f6f62


HIVE-13652 : Import table change order of dynamic partitions (Sankar Hariappan via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6bd3f6f6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6bd3f6f6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6bd3f6f6

Branch: refs/heads/master
Commit: 6bd3f6f6233ccaa377c06717065b25826a16315e
Parents: d6db6ff
Author: Sankar Hariappan <ma...@gmail.com>
Authored: Mon May 8 23:27:47 2017 -0700
Committer: Thejas M Nair <th...@hortonworks.com>
Committed: Mon May 8 23:27:47 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/TestReplicationScenarios.java       | 122 +++++++++++++++++++
 .../apache/hadoop/hive/ql/parse/EximUtil.java   |   4 +-
 2 files changed, 124 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6bd3f6f6/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
index 5173d8b..871b3f2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
@@ -1059,6 +1059,113 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testInsertToMultiKeyPartition() throws IOException {
+    String testName = "insertToMultiKeyPartition";
+    LOG.info("Testing " + testName);
+    String dbName = testName + "_" + tid;
+
+    run("CREATE DATABASE " + dbName);
+    run("CREATE TABLE " + dbName + ".namelist(name string) partitioned by (year int, month int, day int) STORED AS TEXTFILE");
+    run("USE " + dbName);
+
+    String[] ptn_data_1 = new String[] { "abraham", "bob", "carter" };
+    String[] ptn_year_1980 = new String[] { "abraham", "bob" };
+    String[] ptn_day_1 = new String[] { "abraham", "carter" };
+    String[] ptn_year_1984_month_4_day_1_1 = new String[] { "carter" };
+    String[] ptn_list_1 = new String[] { "year=1980/month=4/day=1", "year=1980/month=5/day=5", "year=1984/month=4/day=1" };
+
+    run("INSERT INTO TABLE " + dbName + ".namelist partition(year=1980,month=4,day=1) values('" + ptn_data_1[0] + "')");
+    run("INSERT INTO TABLE " + dbName + ".namelist partition(year=1980,month=5,day=5) values('" + ptn_data_1[1] + "')");
+    run("INSERT INTO TABLE " + dbName + ".namelist partition(year=1984,month=4,day=1) values('" + ptn_data_1[2] + "')");
+
+    verifySetup("SELECT name from " + dbName + ".namelist where (year=1980) ORDER BY name", ptn_year_1980);
+    verifySetup("SELECT name from " + dbName + ".namelist where (day=1) ORDER BY name", ptn_day_1);
+    verifySetup("SELECT name from " + dbName + ".namelist where (year=1984 and month=4 and day=1) ORDER BY name",
+                                                                                ptn_year_1984_month_4_day_1_1);
+    verifySetup("SELECT name from " + dbName + ".namelist ORDER BY name", ptn_data_1);
+    verifySetup("SHOW PARTITIONS " + dbName + ".namelist", ptn_list_1);
+    verifyRunWithPatternMatch("SHOW TABLE EXTENDED LIKE namelist PARTITION (year=1980,month=4,day=1)",
+                              "location", "namelist/year=1980/month=4/day=1");
+
+    advanceDumpDir();
+    run("REPL DUMP " + dbName);
+    String replDumpLocn = getResult(0, 0);
+    String replDumpId = getResult(0, 1, true);
+    LOG.info("Bootstrap-Dump: Dumped to {} with id {}", replDumpLocn, replDumpId);
+    run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
+
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (year=1980) ORDER BY name", ptn_year_1980);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (day=1) ORDER BY name", ptn_day_1);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (year=1984 and month=4 and day=1) ORDER BY name",
+                                                                                   ptn_year_1984_month_4_day_1_1);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist ORDER BY name", ptn_data_1);
+    verifyRun("SHOW PARTITIONS " + dbName + "_dupe.namelist", ptn_list_1);
+
+    run("USE " + dbName + "_dupe");
+    verifyRunWithPatternMatch("SHOW TABLE EXTENDED LIKE namelist PARTITION (year=1980,month=4,day=1)",
+            "location", "namelist/year=1980/month=4/day=1");
+    run("USE " + dbName);
+
+    String[] ptn_data_2 = new String[] { "abraham", "bob", "carter", "david", "eugene" };
+    String[] ptn_year_1984_month_4_day_1_2 = new String[] { "carter", "david" };
+    String[] ptn_day_1_2 = new String[] { "abraham", "carter", "david" };
+    String[] ptn_list_2 = new String[] { "year=1980/month=4/day=1", "year=1980/month=5/day=5",
+                                         "year=1984/month=4/day=1", "year=1990/month=5/day=25" };
+
+    run("INSERT INTO TABLE " + dbName + ".namelist partition(year=1984,month=4,day=1) values('" + ptn_data_2[3] + "')");
+    run("INSERT INTO TABLE " + dbName + ".namelist partition(year=1990,month=5,day=25) values('" + ptn_data_2[4] + "')");
+
+    verifySetup("SELECT name from " + dbName + ".namelist where (year=1980) ORDER BY name", ptn_year_1980);
+    verifySetup("SELECT name from " + dbName + ".namelist where (day=1) ORDER BY name", ptn_day_1_2);
+    verifySetup("SELECT name from " + dbName + ".namelist where (year=1984 and month=4 and day=1) ORDER BY name",
+                                                                                ptn_year_1984_month_4_day_1_2);
+    verifySetup("SELECT name from " + dbName + ".namelist ORDER BY name", ptn_data_2);
+    verifyRun("SHOW PARTITIONS " + dbName + ".namelist", ptn_list_2);
+    verifyRunWithPatternMatch("SHOW TABLE EXTENDED LIKE namelist PARTITION (year=1990,month=5,day=25)",
+            "location", "namelist/year=1990/month=5/day=25");
+
+    advanceDumpDir();
+    run("REPL DUMP " + dbName + " FROM " + replDumpId);
+    String incrementalDumpLocn = getResult(0, 0);
+    String incrementalDumpId = getResult(0, 1, true);
+    LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
+    replDumpId = incrementalDumpId;
+    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'");
+    printOutput();
+    run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'");
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (year=1980) ORDER BY name", ptn_year_1980);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (day=1) ORDER BY name", ptn_day_1_2);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist where (year=1984 and month=4 and day=1) ORDER BY name",
+                                                                                   ptn_year_1984_month_4_day_1_2);
+    verifyRun("SELECT name from " + dbName + "_dupe.namelist ORDER BY name", ptn_data_2);
+    verifyRun("SHOW PARTITIONS " + dbName + "_dupe.namelist", ptn_list_2);
+    run("USE " + dbName + "_dupe");
+    verifyRunWithPatternMatch("SHOW TABLE EXTENDED LIKE namelist PARTITION (year=1990,month=5,day=25)",
+            "location", "namelist/year=1990/month=5/day=25");
+    run("USE " + dbName);
+
+    String[] ptn_data_3 = new String[] { "abraham", "bob", "carter", "david", "fisher" };
+    String[] data_after_ovwrite = new String[] { "fisher" };
+    // Insert overwrite on existing partition
+    run("INSERT OVERWRITE TABLE " + dbName + ".namelist partition(year=1990,month=5,day=25) values('" + data_after_ovwrite[0] + "')");
+    verifySetup("SELECT name from " + dbName + ".namelist where (year=1990 and month=5 and day=25)", data_after_ovwrite);
+    verifySetup("SELECT name from " + dbName + ".namelist ORDER BY name", ptn_data_3);
+
+    advanceDumpDir();
+    run("REPL DUMP " + dbName + " FROM " + replDumpId);
+    incrementalDumpLocn = getResult(0, 0);
+    incrementalDumpId = getResult(0, 1, true);
+    LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
+    replDumpId = incrementalDumpId;
+    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'");
+    printOutput();
+    run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'");
+
+    verifySetup("SELECT name from " + dbName + "_dupe.namelist where (year=1990 and month=5 and day=25)", data_after_ovwrite);
+    verifySetup("SELECT name from " + dbName + "_dupe.namelist ORDER BY name", ptn_data_3);
+  }
+
+  @Test
   public void testViewsReplication() throws IOException {
     String testName = "viewsReplication";
     String dbName = createDB(testName);
@@ -1808,6 +1915,21 @@ public class TestReplicationScenarios {
     assertFalse(success);
   }
 
+  private void verifyRunWithPatternMatch(String cmd, String key, String pattern) throws IOException {
+    run(cmd);
+    List<String> results = getOutput();
+    assertTrue(results.size() > 0);
+    boolean success = false;
+    for (int i = 0; i < results.size(); i++) {
+      if (results.get(i).contains(key) && results.get(i).contains(pattern)) {
+         success = true;
+         break;
+      }
+    }
+
+    assertTrue(success);
+  }
+
   private static void run(String cmd) throws RuntimeException {
     try {
     run(cmd,false); // default arg-less run simply runs, and does not care about failure

http://git-wip-us.apache.org/repos/asf/hive/blob/6bd3f6f6/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index a9384be..83de9ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -51,10 +51,10 @@ import java.net.URISyntaxException;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
-import java.util.TreeMap;
 
 /**
  *
@@ -335,7 +335,7 @@ public class EximUtil {
    * @return the partition specification as a map
    */
   public static Map<String, String> makePartSpec(List<FieldSchema> partCols, List<String> partVals) {
-    Map<String, String> partSpec = new TreeMap<String, String>();
+    Map<String, String> partSpec = new LinkedHashMap<String, String>();
     for (int i = 0; i < partCols.size(); ++i) {
       partSpec.put(partCols.get(i).getName(), partVals.get(i));
     }