Posted to commits@hive.apache.org by sa...@apache.org on 2018/06/15 03:17:03 UTC

hive git commit: HIVE-19881: Allow metadata-only dump for databases which are not a source of replication (Mahesh Kumar Behera, reviewed by Sankar Hariappan)

Repository: hive
Updated Branches:
  refs/heads/master 0a5b3b4d3 -> 368d9cffe


HIVE-19881: Allow metadata-only dump for databases which are not a source of replication (Mahesh Kumar Behera, reviewed by Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/368d9cff
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/368d9cff
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/368d9cff

Branch: refs/heads/master
Commit: 368d9cffe77bf6b7076ca83ec300c69fde651cb7
Parents: 0a5b3b4
Author: Sankar Hariappan <sa...@apache.org>
Authored: Fri Jun 15 08:46:22 2018 +0530
Committer: Sankar Hariappan <sa...@apache.org>
Committed: Fri Jun 15 08:46:22 2018 +0530

----------------------------------------------------------------------
 .../hive/ql/parse/TestReplicationScenarios.java |  4 +++
 ...TestReplicationScenariosAcrossInstances.java | 36 ++++++++++++++++++++
 .../ql/parse/ReplicationSemanticAnalyzer.java   | 29 +++++++++-------
 3 files changed, 57 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
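
With this change, a database that is not marked as a source of replication (i.e. it has
no 'repl.source.for' database property) can still be dumped as long as the dump is
metadata-only; a regular REPL DUMP of such a database continues to fail, and loading a
metadata-only dump creates the table definitions on the replica without any data. A rough
usage sketch based on the test cases below (the database name is illustrative):

  -- still fails: the database is not a source of replication
  REPL DUMP some_db;
  REPL DUMP some_db FROM 1;

  -- now allowed: metadata-only bootstrap and incremental dumps
  REPL DUMP some_db WITH ('hive.repl.dump.metadata.only' = 'true');
  REPL DUMP some_db FROM 1 WITH ('hive.repl.dump.metadata.only' = 'true');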


http://git-wip-us.apache.org/repos/asf/hive/blob/368d9cff/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index f4cdf02..862140f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -3162,6 +3162,10 @@ public class TestReplicationScenarios {
     String dbName = createDBNonRepl(testName.getMethodName(), driver);
     verifyFail("REPL DUMP " + dbName, driver);
     verifyFail("REPL DUMP " + dbName + " from 1 ", driver);
+    assertTrue(run("REPL DUMP " + dbName + " with ('hive.repl.dump.metadata.only' = 'true')",
+            true, driver));
+    assertTrue(run("REPL DUMP " + dbName + " from 1  with ('hive.repl.dump.metadata.only' = 'true')",
+            true, driver));
     run("alter database " + dbName + " set dbproperties ('repl.source.for' = '1, 2, 3')", driver);
     assertTrue(run("REPL DUMP " + dbName, true, driver));
     assertTrue(run("REPL DUMP " + dbName + " from 1 ", true, driver));

http://git-wip-us.apache.org/repos/asf/hive/blob/368d9cff/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 4b3a976..26e308c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -404,6 +404,42 @@ public class TestReplicationScenariosAcrossInstances {
   }
 
   @Test
+  public void testNonReplDBMetadataReplication() throws Throwable {
+    String dbName = primaryDbName + "_metadata";
+    WarehouseInstance.Tuple tuple = primary
+            .run("create database " + dbName)
+            .run("use " + dbName)
+            .run("create table table1 (i int, j int)")
+            .run("create table table2 (a int, city string) partitioned by (country string)")
+            .run("create table table3 (i int, j int)")
+            .run("insert into table1 values (1,2)")
+            .dump(dbName, null, Arrays.asList("'hive.repl.dump.metadata.only'='true'"));
+
+    replica.load(replicatedDbName, tuple.dumpLocation)
+            .run("use " + replicatedDbName)
+            .run("show tables")
+            .verifyResults(new String[]{"table1", "table2", "table3"})
+            .run("select * from table1")
+            .verifyResults(Collections.emptyList());
+
+    tuple = primary
+            .run("use " + dbName)
+            .run("alter table table1 rename to renamed_table1")
+            .run("insert into table2 partition(country='india') values (1,'mumbai') ")
+            .run("create table table4 (i int, j int)")
+            .dump(dbName, tuple.lastReplicationId, Arrays.asList("'hive.repl.dump.metadata.only'='true'"));
+
+    replica.load(replicatedDbName, tuple.dumpLocation)
+            .run("use " + replicatedDbName)
+            .run("show tables")
+            .verifyResults(new String[] { "renamed_table1", "table2", "table3", "table4" })
+            .run("select * from renamed_table1")
+            .verifyResults(Collections.emptyList())
+            .run("select * from table2")
+            .verifyResults(Collections.emptyList());
+  }
+
+  @Test
   public void testBootStrapDumpOfWarehouse() throws Throwable {
     String randomOne = RandomStringUtils.random(10, true, false);
     String randomTwo = RandomStringUtils.random(10, true, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/368d9cff/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index 356a8c4..9753b5c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -134,20 +134,9 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   private void initReplDump(ASTNode ast) throws HiveException {
     int numChildren = ast.getChildCount();
+    boolean isMetaDataOnly = false;
     dbNameOrPattern = PlanUtils.stripQuotes(ast.getChild(0).getText());
 
-    for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
-      Database database = db.getDatabase(dbName);
-      if (database != null) {
-        if (!ReplChangeManager.isSourceOfReplication(database)) {
-          throw new SemanticException("Cannot dump database " + dbNameOrPattern +
-                  " as it is not a source of replication");
-        }
-      } else {
-        throw new SemanticException("Cannot dump database " + dbNameOrPattern + " as it does not exist");
-      }
-    }
-
     // skip the first node, which is always required
     int currNode = 1;
     while (currNode < numChildren) {
@@ -157,6 +146,10 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         if (null != replConfigs) {
           for (Map.Entry<String, String> config : replConfigs.entrySet()) {
             conf.set(config.getKey(), config.getValue());
+            if ("hive.repl.dump.metadata.only".equalsIgnoreCase(config.getKey()) &&
+                    "true".equalsIgnoreCase(config.getValue())) {
+              isMetaDataOnly = true;
+            }
           }
         }
       } else if (ast.getChild(currNode).getType() == TOK_TABNAME) {
@@ -187,6 +180,18 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
       // move to the next root node
       currNode++;
     }
+
+    for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
+      Database database = db.getDatabase(dbName);
+      if (database != null) {
+        if (!ReplChangeManager.isSourceOfReplication(database) && !isMetaDataOnly) {
+          throw new SemanticException("Cannot dump database " + dbName +
+                  " as it is not a source of replication");
+        }
+      } else {
+        throw new SemanticException("Cannot dump database " + dbName + " as it does not exist");
+      }
+    }
   }
 
   // REPL DUMP
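
Note on the analyzer change above: the loop that verifies each matched database is a
source of replication now runs after the WITH clause has been parsed, so that
'hive.repl.dump.metadata.only' = 'true' can relax the check; the error message now also
reports the matched database name rather than the name pattern. For a full (data) dump
the database still has to be tagged as a replication source, as the first test does. A
small sketch, with an illustrative database name and property value:

  -- tag the database so a regular REPL DUMP is accepted again
  ALTER DATABASE some_db SET DBPROPERTIES ('repl.source.for' = '1, 2, 3');
  REPL DUMP some_db;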