Posted to commits@hive.apache.org by sa...@apache.org on 2018/06/18 00:28:21 UTC

hive git commit: HIVE-19880: Repl Load to return recoverable vs non-recoverable error codes (Mahesh Kumar Behera, reviewed by Sankar Hariappan)

Repository: hive
Updated Branches:
  refs/heads/master 24da46034 -> f83d7654e


HIVE-19880: Repl Load to return recoverable vs non-recoverable error codes (Mahesh Kumar Behera, reviewed by Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f83d7654
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f83d7654
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f83d7654

Branch: refs/heads/master
Commit: f83d7654ee8e6758c0026ed53a3a928914640e38
Parents: 24da460
Author: Sankar Hariappan <sa...@apache.org>
Authored: Sun Jun 17 17:28:02 2018 -0700
Committer: Sankar Hariappan <sa...@apache.org>
Committed: Sun Jun 17 17:28:02 2018 -0700

----------------------------------------------------------------------
 .../hive/ql/parse/TestReplicationScenarios.java | 30 +++++++++++++++++++-
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 22 ++++++++++++++
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     | 11 +++++++
 .../hadoop/hive/ql/exec/ReplCopyTask.java       |  3 +-
 .../hadoop/hive/ql/exec/repl/ReplDumpTask.java  |  3 +-
 .../ql/exec/repl/bootstrap/ReplLoadTask.java    |  3 +-
 .../filesystem/DatabaseEventsIterator.java      |  4 +--
 .../ql/parse/ReplicationSemanticAnalyzer.java   | 22 +++++++-------
 .../hadoop/hive/ql/parse/repl/CopyUtils.java    | 16 ++++++-----
 .../hive/ql/parse/repl/dump/TableExport.java    |  2 +-
 .../ql/parse/repl/dump/io/FileOperations.java   |  5 ++--
 .../hive/metastore/HiveMetaStoreClient.java     |  5 +++-
 .../hive/metastore/messaging/EventUtils.java    |  4 +--
 13 files changed, 99 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 862140f..689c859 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -73,6 +73,7 @@ import org.junit.rules.TestName;
 import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 
 import javax.annotation.Nullable;
 
@@ -853,7 +854,8 @@ public class TestReplicationScenarios {
     InjectableBehaviourObjectStore.setGetNextNotificationBehaviour(eventIdSkipper);
 
     advanceDumpDir();
-    verifyFail("REPL DUMP " + dbName + " FROM " + replDumpId, driver);
+    CommandProcessorResponse ret = driver.run("REPL DUMP " + dbName + " FROM " + replDumpId);
+    assertTrue(ret.getResponseCode() == ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.getErrorCode());
     eventIdSkipper.assertInjectionsPerformed(true,false);
     InjectableBehaviourObjectStore.resetGetNextNotificationBehaviour(); // reset the behaviour
   }
@@ -3158,6 +3160,32 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testLoadCmPathMissing() throws IOException {
+    String dbName = createDB(testName.getMethodName(), driver);
+    run("CREATE TABLE " + dbName + ".normal(a int)", driver);
+    run("INSERT INTO " + dbName + ".normal values (1)", driver);
+
+    advanceDumpDir();
+    run("repl dump " + dbName, true, driver);
+    String dumpLocation = getResult(0, 0, driver);
+
+    run("DROP TABLE " + dbName + ".normal", driver);
+
+    String cmDir = hconf.getVar(HiveConf.ConfVars.REPLCMDIR);
+    Path path = new Path(cmDir);
+    FileSystem fs = path.getFileSystem(hconf);
+    ContentSummary cs = fs.getContentSummary(path);
+    long fileCount = cs.getFileCount();
+    assertTrue(fileCount != 0);
+    fs.delete(path);
+
+    CommandProcessorResponse ret = driverMirror.run("REPL LOAD " + dbName + " FROM '" + dumpLocation + "'");
+    assertTrue(ret.getResponseCode() == ErrorMsg.REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH.getErrorCode());
+    run("drop database " + dbName, true, driver);
+    fs.create(path, false);
+  }
+
+  @Test
   public void testDumpNonReplDatabase() throws IOException {
     String dbName = createDBNonRepl(testName.getMethodName(), driver);
     verifyFail("REPL DUMP " + dbName, driver);

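With REPL DUMP and REPL LOAD now surfacing specific codes through CommandProcessorResponse, a caller can decide whether a failed command is worth re-driving instead of treating every non-zero return the same way. A minimal sketch of such a wrapper (not part of this commit; the class name, retry count, and sleep interval are illustrative assumptions):

    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.ErrorMsg;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    public class ReplLoadRetry {
      // Codes from the new 20016-20020 block are terminal; retrying cannot help.
      private static boolean isNonRecoverable(int code) {
        return code == ErrorMsg.REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH.getErrorCode()
            || code == ErrorMsg.REPL_LOAD_PATH_NOT_FOUND.getErrorCode()
            || code == ErrorMsg.REPL_BOOTSTRAP_LOAD_PATH_NOT_VALID.getErrorCode();
      }

      public static void runWithRetry(Driver driver, String replCmd) throws Exception {
        for (int attempt = 1; ; attempt++) {
          CommandProcessorResponse ret = driver.run(replCmd);
          if (ret.getResponseCode() == 0) {
            return;                                  // success
          }
          if (isNonRecoverable(ret.getResponseCode()) || attempt >= 3) {
            throw new RuntimeException("REPL command failed with error code "
                + ret.getResponseCode());
          }
          Thread.sleep(10_000);                      // recoverable: back off and retry
        }
      }
    }
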
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index d47c136..850b2d5 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -76,6 +76,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 
 import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
@@ -2927,6 +2928,27 @@ public class TestJdbcDriver2 {
     stmt.close();
   }
 
+  @Test
+  public void testReplErrorScenarios() throws Exception {
+    HiveStatement stmt = (HiveStatement) con.createStatement();
+
+    try {
+      // source of replication not set
+      stmt.execute("repl dump default");
+    } catch(SQLException e){
+      assertTrue(e.getErrorCode() == ErrorMsg.REPL_DATABASE_IS_NOT_SOURCE_OF_REPLICATION.getErrorCode());
+    }
+
+    try {
+      // invalid load path
+      stmt.execute("repl load default1 from '/tmp/junk'");
+    } catch(SQLException e){
+      assertTrue(e.getErrorCode() == ErrorMsg.REPL_LOAD_PATH_NOT_FOUND.getErrorCode());
+    }
+
+    stmt.close();
+  }
+
   /**
    * Test {@link HiveStatement#executeAsync(String)} for an insert overwrite into a table
    * @throws Exception

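On the client side the same codes surface through SQLException.getErrorCode(), so a JDBC application can tell a fixable mistake from a transient failure. A minimal sketch (not part of this commit; the connection URL is an illustrative assumption):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;
    import org.apache.hadoop.hive.ql.ErrorMsg;

    public class ReplJdbcClient {
      public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = con.createStatement()) {
          try {
            stmt.execute("repl load default1 from '/tmp/junk'");
          } catch (SQLException e) {
            if (e.getErrorCode() == ErrorMsg.REPL_LOAD_PATH_NOT_FOUND.getErrorCode()) {
              // Non-recoverable: the dump path is wrong; correct it instead of retrying.
              System.err.println("Invalid load path: " + e.getMessage());
            } else {
              throw e;  // let genuinely unexpected failures propagate
            }
          }
        }
      }
    }
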
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index bc2cffa..90d6b8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -499,6 +499,15 @@ public enum ErrorMsg {
           " queue: {1}. Please fix and try again.", true),
   SPARK_RUNTIME_OOM(20015, "Spark job failed because of out of memory."),
 
+  // If the error message for REPL_EVENTS_MISSING_IN_METASTORE is changed, the getNextNotification
+  // method in HiveMetaStoreClient must be updated to match.
+  REPL_EVENTS_MISSING_IN_METASTORE(20016, "Notification events are missing in the meta store."),
+  REPL_BOOTSTRAP_LOAD_PATH_NOT_VALID(20017, "Target database is bootstrapped from some other path."),
+  REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH(20018, "File is missing from both source and cm path."),
+  REPL_LOAD_PATH_NOT_FOUND(20019, "Load path does not exist."),
+  REPL_DATABASE_IS_NOT_SOURCE_OF_REPLICATION(20020,
+          "Source of replication (repl.source.for) is not set in the database properties."),
+
   // An exception from runtime that will show the full stack to client
   UNRESOLVED_RT_EXCEPTION(29999, "Runtime Error: {0}", "58004", true),
 
@@ -588,6 +597,8 @@ public enum ErrorMsg {
   SPARK_GET_JOB_INFO_INTERRUPTED(30045, "Spark job was interrupted while getting job info"),
   SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info due to exception {0}"),
 
+  REPL_FILE_SYSTEM_OPERATION_RETRY(30047, "Replication file system operation retry expired."),
+
   //========================== 40000 range starts here ========================//
 
   SPARK_JOB_RUNTIME_ERROR(40001, "Spark job failed due to: {0}", true),

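The new entries rely on exact message strings: a task catches an exception and maps the message back to its code via ErrorMsg.getErrorMsg(String), which is why the comment above warns against editing the REPL_EVENTS_MISSING_IN_METASTORE text casually. A minimal sketch of the round trip (not part of this commit):

    import org.apache.hadoop.hive.ql.ErrorMsg;

    public class ErrorCodeRoundTrip {
      public static void main(String[] args) {
        // The message produced at the throw site...
        Exception e = new RuntimeException(ErrorMsg.REPL_LOAD_PATH_NOT_FOUND.getMsg());
        // ...is matched back to the enum entry at the catch site.
        ErrorMsg resolved = ErrorMsg.getErrorMsg(e.getMessage());
        System.out.println(resolved.getErrorCode());  // prints 20019
      }
    }
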
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 8a89103..3a7f1bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.CopyWork;
@@ -165,7 +166,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
     } catch (Exception e) {
       LOG.error(StringUtils.stringifyException(e));
       setException(e);
-      return (1);
+      return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
     }
   }
 

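Returning ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode() instead of a flat 1 is what propagates the specific code up to CommandProcessorResponse. If the message matches no known entry, getErrorMsg falls back to a generic code rather than failing, so unknown errors still yield a non-zero return. A minimal sketch (not part of this commit; the GENERIC_ERROR fallback is pre-existing ErrorMsg behavior, not introduced here):

    import org.apache.hadoop.hive.ql.ErrorMsg;

    public class FallbackDemo {
      public static void main(String[] args) {
        // A message no ErrorMsg entry declares resolves to the generic fallback.
        ErrorMsg resolved = ErrorMsg.getErrorMsg("some message no enum declares");
        System.out.println(resolved == ErrorMsg.GENERIC_ERROR);  // expected: true
      }
    }
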
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
index ccdf04a..7e5f805 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTable
 import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.lockmgr.LockException;
@@ -123,7 +124,7 @@ public class ReplDumpTask extends Task<ReplDumpWork> implements Serializable {
     } catch (Exception e) {
       LOG.error("failed", e);
       setException(e);
-      return 1;
+      return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
     }
     return 0;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/ReplLoadTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/ReplLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/ReplLoadTask.java
index 76fb2a3..50fe3ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/ReplLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/ReplLoadTask.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.repl.bootstrap;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.repl.ReplStateLogWork;
@@ -223,7 +224,7 @@ public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
     } catch (Exception e) {
       LOG.error("failed replication", e);
       setException(e);
-      return 1;
+      return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
     }
     LOG.info("completed load task run : {}", work.executedLoadTask());
     return 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/events/filesystem/DatabaseEventsIterator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/events/filesystem/DatabaseEventsIterator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/events/filesystem/DatabaseEventsIterator.java
index ecedf9b..f778cb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/events/filesystem/DatabaseEventsIterator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/events/filesystem/DatabaseEventsIterator.java
@@ -91,8 +91,8 @@ class DatabaseEventsIterator implements Iterator<BootstrapEvent> {
       return true;
     } catch (Exception e) {
       // maybe do some retry logic here.
-      throw new RuntimeException("could not traverse the file via remote iterator " + dbLevelPath,
-          e);
+      LOG.error("could not traverse the file via remote iterator " + dbLevelPath, e);
+      throw new RuntimeException(e.getMessage(), e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index 9753b5c..5aeae16 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -60,6 +60,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.REPL_DUMP_METADATA_ONLY;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DBNAME;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_LIMIT;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_REPL_CONFIG;
@@ -109,7 +110,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         try {
           initReplDump(ast);
         } catch (HiveException e) {
-          throw new SemanticException("repl dump failed " + e.getMessage());
+          throw new SemanticException(e.getMessage(), e);
         }
         analyzeReplDump(ast);
         break;
@@ -146,11 +147,8 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         if (null != replConfigs) {
           for (Map.Entry<String, String> config : replConfigs.entrySet()) {
             conf.set(config.getKey(), config.getValue());
-            if ("hive.repl.dump.metadata.only".equalsIgnoreCase(config.getKey()) &&
-                    "true".equalsIgnoreCase(config.getValue())) {
-              isMetaDataOnly = true;
-            }
           }
+          isMetaDataOnly = HiveConf.getBoolVar(conf, REPL_DUMP_METADATA_ONLY);
         }
       } else if (ast.getChild(currNode).getType() == TOK_TABNAME) {
         // optional tblName was specified.
@@ -184,12 +182,13 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
     for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
       Database database = db.getDatabase(dbName);
       if (database != null) {
-        if (!ReplChangeManager.isSourceOfReplication(database) && !isMetaDataOnly) {
-          throw new SemanticException("Cannot dump database " + dbName +
-                  " as it is not a source of replication");
+        if (!isMetaDataOnly && !ReplChangeManager.isSourceOfReplication(database)) {
+          LOG.error("Cannot dump database " + dbNameOrPattern +
+                  " as it is not a source of replication (repl.source.for)");
+          throw new SemanticException(ErrorMsg.REPL_DATABASE_IS_NOT_SOURCE_OF_REPLICATION.getMsg());
         }
       } else {
-        throw new SemanticException("Cannot dump database " + dbName + " as it does not exist");
+        throw new SemanticException("Cannot dump database " + dbNameOrPattern + " as it does not exist");
       }
     }
   }
@@ -365,7 +364,8 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
 
       if (!fs.exists(loadPath)) {
         // supposed dump path does not exist.
-        throw new FileNotFoundException(loadPath.toUri().toString());
+        LOG.error("File not found " + loadPath.toUri().toString());
+        throw new FileNotFoundException(ErrorMsg.REPL_LOAD_PATH_NOT_FOUND.getMsg());
       }
 
       // Now, the dumped path can be one of three things:
@@ -511,7 +511,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     } catch (Exception e) {
       // TODO : simple wrap & rethrow for now, clean up with error codes
-      throw new SemanticException(e);
+      throw new SemanticException(e.getMessage(), e);
     }
   }
 

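initReplDump now exempts metadata-only dumps from the repl.source.for check. Concretely, a dump succeeds if either the database is marked as a replication source or the dump is requested as metadata-only. A minimal sketch (not part of this commit; the property value '1' is an illustrative placeholder for a replication policy name):

    import org.apache.hadoop.hive.ql.Driver;

    public class DumpPrecondition {
      public static void dump(Driver driver, String dbName) throws Exception {
        // Option 1: mark the database as a source of replication, then dump.
        driver.run("ALTER DATABASE " + dbName + " SET DBPROPERTIES ('repl.source.for' = '1')");
        driver.run("REPL DUMP " + dbName);

        // Option 2: metadata-only dumps skip the repl.source.for requirement.
        driver.run("REPL DUMP " + dbName + " WITH ('hive.repl.dump.metadata.only' = 'true')");
      }
    }
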
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
index 79b4652..61bf6b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.HiveFatalException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -68,7 +70,7 @@ public class CopyUtils {
   // changed/removed during copy, so double check the checksum after copy,
   // if not match, copy again from cm
   public void copyAndVerify(FileSystem destinationFs, Path destRoot,
-                    List<ReplChangeManager.FileInfo> srcFiles) throws IOException, LoginException {
+                    List<ReplChangeManager.FileInfo> srcFiles) throws IOException, LoginException, HiveFatalException {
     Map<FileSystem, Map< Path, List<ReplChangeManager.FileInfo>>> map = fsToFileMap(srcFiles, destRoot);
     for (Map.Entry<FileSystem, Map<Path, List<ReplChangeManager.FileInfo>>> entry : map.entrySet()) {
       FileSystem sourceFs = entry.getKey();
@@ -92,7 +94,7 @@ public class CopyUtils {
 
   private void doCopyRetry(FileSystem sourceFs, List<ReplChangeManager.FileInfo> srcFileList,
                            FileSystem destinationFs, Path destination,
-                           boolean useRegularCopy) throws IOException, LoginException {
+                           boolean useRegularCopy) throws IOException, LoginException, HiveFatalException {
     int repeat = 0;
     boolean isCopyError = false;
     List<Path> pathList = Lists.transform(srcFileList, ReplChangeManager.FileInfo::getEffectivePath);
@@ -145,7 +147,7 @@ public class CopyUtils {
     // If still files remains to be copied due to failure/checksum mismatch after several attempts, then throw error
     if (!pathList.isEmpty()) {
       LOG.error("File copy failed even after several attempts. Files list: " + pathList);
-      throw new IOException("File copy failed even after several attempts.");
+      throw new IOException(ErrorMsg.REPL_FILE_SYSTEM_OPERATION_RETRY.getMsg());
     }
   }
 
@@ -154,7 +156,7 @@ public class CopyUtils {
   // itself is missing, then throw error.
   private List<Path> getFilesToRetry(FileSystem sourceFs, List<ReplChangeManager.FileInfo> srcFileList,
                                      FileSystem destinationFs, Path destination, boolean isCopyError)
-          throws IOException {
+          throws IOException, HiveFatalException {
     List<Path> pathList = new ArrayList<Path>();
 
     // Going through file list and make the retry list
@@ -190,9 +192,9 @@ public class CopyUtils {
       srcPath = srcFile.getEffectivePath();
       if (null == srcPath) {
         // This case possible if CM path is not enabled.
-        LOG.error("File copy failed and likely source file is deleted or modified. "
+        LOG.error("File copy failed and likely source file is deleted or modified. "
                 + "Source File: " + srcFile.getSourcePath());
-        throw new IOException("File copy failed and likely source file is deleted or modified.");
+        throw new HiveFatalException(ErrorMsg.REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH.getMsg());
       }
 
       if (!srcFile.isUseSourcePath() && !sourceFs.exists(srcFile.getCmPath())) {
@@ -201,7 +203,7 @@ public class CopyUtils {
                 + "Missing Source File: " + srcFile.getSourcePath() + ", CM File: " + srcFile.getCmPath() + ". "
                 + "Try setting higher value for hive.repl.cm.retain in source warehouse. "
                 + "Also, bootstrap the system again to get back the consistent replicated state.");
-        throw new IOException("Both source and CM path are missing from source.");
+        throw new HiveFatalException(ErrorMsg.REPL_FILE_MISSING_FROM_SRC_AND_CM_PATH.getMsg());
       }
 
       pathList.add(srcPath);

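This file carries the core of the recoverable/non-recoverable split: a file missing from both the source and the CM path now surfaces as HiveFatalException (code 20018, re-bootstrap required), while exhausted copy retries remain an IOException carrying the REPL_FILE_SYSTEM_OPERATION_RETRY message (code 30047, safe to re-run). A minimal sketch of a caller distinguishing the two (not part of this commit):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.metastore.ReplChangeManager;
    import org.apache.hadoop.hive.ql.metadata.HiveFatalException;
    import org.apache.hadoop.hive.ql.parse.repl.CopyUtils;

    public class CopyFailureHandling {
      static void copy(CopyUtils copyUtils, FileSystem destFs, Path destRoot,
                       List<ReplChangeManager.FileInfo> srcFiles) throws Exception {
        try {
          copyUtils.copyAndVerify(destFs, destRoot, srcFiles);
        } catch (HiveFatalException e) {
          // Non-recoverable: data is gone from both source and CM path;
          // only a fresh bootstrap restores a consistent replica.
          throw e;
        } catch (IOException e) {
          // Recoverable: retries expired on a transient filesystem problem;
          // rerunning the replication command later is expected to succeed.
          throw e;
        }
      }
    }
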
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
index 20ff23a..b60be88 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
@@ -162,7 +162,7 @@ public class TableExport {
             .export(replicationSpec);
       }
     } catch (Exception e) {
-      throw new SemanticException(e);
+      throw new SemanticException(e.getMessage(), e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
index c923121..58eae38 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
@@ -21,12 +21,10 @@ import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 
 import javax.security.auth.login.LoginException;
 
-import org.apache.curator.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,6 +32,7 @@ import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.ValidWriteIdList;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
@@ -161,7 +160,7 @@ public class FileOperations {
         logger.info("writeFilesList failed", e);
         if (repeat >= FileUtils.MAX_IO_ERROR_RETRY) {
           logger.error("exporting data files in dir : " + dataPathList + " to " + exportRootDataDir + " failed");
-          throw e;
+          throw new IOException(ErrorMsg.REPL_FILE_SYSTEM_OPERATION_RETRY.getMsg());
         }
 
         int sleepTime = FileUtils.getSleepTime(repeat - 1);

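The retry loop here is bounded by FileUtils.MAX_IO_ERROR_RETRY with a growing sleep from FileUtils.getSleepTime; once the budget is spent, the change above replaces the raw IOException with the recoverable 30047 message. The pattern in isolation (a sketch, not part of this commit; the IoAction interface is an illustrative stand-in for the export body):

    import java.io.IOException;
    import org.apache.hadoop.hive.common.FileUtils;
    import org.apache.hadoop.hive.ql.ErrorMsg;

    public class BoundedRetry {
      interface IoAction { void run() throws IOException; }

      static void withRetry(IoAction action) throws IOException, InterruptedException {
        int repeat = 0;
        while (true) {
          try {
            action.run();
            return;
          } catch (IOException e) {
            repeat++;
            if (repeat >= FileUtils.MAX_IO_ERROR_RETRY) {
              // Budget spent: surface the recoverable retry-expired code (30047).
              throw new IOException(ErrorMsg.REPL_FILE_SYSTEM_OPERATION_RETRY.getMsg());
            }
            Thread.sleep(FileUtils.getSleepTime(repeat - 1));
          }
        }
      }
    }
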
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 8990928..da41e6e 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -128,6 +128,9 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
   private long retryDelaySeconds = 0;
   private final ClientCapabilities version;
 
+  // Copied from ErrorMsg.java; keep in sync with ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.
+  private static final String REPL_EVENTS_MISSING_IN_METASTORE = "Notification events are missing in the meta store.";
+  
   static final protected Logger LOG = LoggerFactory.getLogger(HiveMetaStoreClient.class);
 
   public HiveMetaStoreClient(Configuration conf) throws MetaException {
@@ -2717,7 +2720,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
                   + "Try setting higher value for hive.metastore.event.db.listener.timetolive. "
                   + "Also, bootstrap the system again to get back the consistent replicated state.",
                   nextEventId, e.getEventId());
-          throw new IllegalStateException("Notification events are missing.");
+          throw new IllegalStateException(REPL_EVENTS_MISSING_IN_METASTORE);
         }
         if ((filter != null) && filter.accept(e)) {
           filtered.addToEvents(e);

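standalone-metastore cannot depend on the ql module, so the message is duplicated here as a private string constant. A guard test on the ql side (a sketch, not part of this commit; the test class name is an illustrative assumption) would pin the literal so the two copies cannot drift apart silently:

    import static org.junit.Assert.assertEquals;
    import org.apache.hadoop.hive.ql.ErrorMsg;
    import org.junit.Test;

    public class TestReplErrorMessageSync {
      @Test
      public void metastoreCopyMatchesErrorMsg() {
        // Mirrors the private constant in HiveMetaStoreClient.
        assertEquals("Notification events are missing in the meta store.",
            ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.getMsg());
      }
    }
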
http://git-wip-us.apache.org/repos/asf/hive/blob/f83d7654/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
index 7d8c1d4..2b16897 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
@@ -93,7 +93,7 @@ public class EventUtils {
       try {
         return msc.getNextNotification(pos,getBatchSize(), filter).getEvents();
       } catch (TException e) {
-        throw new IOException(e);
+        throw new IOException(e.getMessage(), e);
       }
     }
   }
@@ -179,7 +179,7 @@ public class EventUtils {
         // but throwing the exception is the appropriate result here, and hasNext()
         // signature will only allow RuntimeExceptions. Iterator.hasNext() really
         // should have allowed IOExceptions
-        throw new RuntimeException(e);
+        throw new RuntimeException(e.getMessage(), e);
       }
       // New batch has been fetched. If it's not empty, we have more elements to process.
       return !batch.isEmpty();