Posted to commits@hive.apache.org by kg...@apache.org on 2018/02/07 08:52:10 UTC

[3/3] hive git commit: HIVE-17991: Remove CommandNeedRetryException (Zoltan Haindrich reviewed by Ashutosh Chauhan)

HIVE-17991: Remove CommandNeedRetryException (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f7dea106
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f7dea106
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f7dea106

Branch: refs/heads/master
Commit: f7dea1060247dddccd69112e24200ec84d2847a3
Parents: f942e72
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Wed Feb 7 09:37:57 2018 +0100
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Wed Feb 7 09:37:57 2018 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   | 145 +++++++++----------
 .../hadoop/hive/cli/TestCliDriverMethods.java   |  11 +-
 .../org/apache/hive/hcatalog/cli/HCatCli.java   |   8 +-
 .../apache/hive/hcatalog/cli/HCatDriver.java    |   9 +-
 .../hive/hcatalog/cli/TestSemanticAnalysis.java |  50 +++----
 .../hive/hcatalog/cli/TestUseDatabase.java      |   3 +-
 .../hive/hcatalog/data/HCatDataCheckUtil.java   |  17 +--
 .../hive/hcatalog/data/TestReaderWriter.java    |   5 +-
 .../hcatalog/pig/AbstractHCatLoaderTest.java    |  17 +--
 .../hcatalog/pig/AbstractHCatStorerTest.java    |  30 ++--
 .../hive/hcatalog/pig/TestE2EScenarios.java     |   8 +-
 .../pig/TestHCatLoaderComplexSchema.java        |  30 ++--
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |  62 ++++----
 .../hive/hcatalog/pig/TestHCatStorer.java       |  31 ++--
 .../hive/hcatalog/pig/TestHCatStorerMulti.java  |   9 +-
 .../hcatalog/pig/TestHCatStorerWrapper.java     |   4 +-
 .../hcatalog/pig/TestParquetHCatLoader.java     |   5 -
 .../hcatalog/listener/TestMsgBusConnection.java |   3 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |  34 ++---
 .../streaming/QueryFailedException.java         |   5 +-
 .../hive/hcatalog/streaming/TestStreaming.java  |  87 ++++++-----
 .../api/repl/commands/TestCommands.java         |  15 +-
 ...estDDLWithRemoteMetastoreSecondNamenode.java |  22 +--
 .../hive/ql/parse/TestReplicationScenarios.java |  88 ++++-------
 .../hadoop/hive/ql/parse/WarehouseInstance.java |  15 +-
 .../TestHiveAuthorizerCheckInvocation.java      |  52 +++----
 .../plugin/TestHiveAuthorizerShowFilters.java   |  26 ++--
 .../hive/ql/txn/compactor/TestCompactor.java    |   3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   8 +-
 .../hive/ql/CommandNeedRetryException.java      |  40 -----
 .../java/org/apache/hadoop/hive/ql/Context.java |  11 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  37 +----
 .../java/org/apache/hadoop/hive/ql/IDriver.java |   8 +-
 .../org/apache/hadoop/hive/ql/QueryDisplay.java |   6 -
 .../apache/hadoop/hive/ql/exec/FetchTask.java   |   7 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java    |   8 -
 .../hive/ql/optimizer/GlobalLimitOptimizer.java |   2 +-
 .../hive/ql/parse/ExplainSemanticAnalyzer.java  |   3 -
 .../hadoop/hive/ql/parse/TaskCompiler.java      |  11 --
 .../hive/ql/processors/CommandProcessor.java    |   4 +-
 .../hive/ql/processors/CompileProcessor.java    |   3 +-
 .../hive/ql/processors/CryptoProcessor.java     |   3 +-
 .../hadoop/hive/ql/processors/DfsProcessor.java |   9 +-
 .../ql/processors/ListResourceProcessor.java    |   3 +-
 .../hive/ql/processors/ReloadProcessor.java     |   3 +-
 .../hive/ql/processors/ResetProcessor.java      |   5 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java    |   5 -
 .../ql/udf/generic/GenericUDTFGetSplits.java    |  11 +-
 .../hadoop/hive/ql/hooks/TestQueryHooks.java    |  11 +-
 .../hadoop/hive/ql/parse/TestColumnAccess.java  |   5 +-
 .../hive/ql/plan/TestReadEntityDirect.java      |   3 +-
 .../hive/ql/processors/TestResetProcessor.java  |   5 +-
 .../org/apache/hive/tmpl/QueryProfileTmpl.jamon |   2 -
 .../service/cli/operation/SQLOperation.java     |   7 -
 54 files changed, 372 insertions(+), 642 deletions(-)
----------------------------------------------------------------------
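
For orientation before the per-file hunks: the heart of this commit is in
CliDriver.processLocalCmd, where the do/while retry loop driven by
CommandNeedRetryException is unwound into a single straight-line execution
path. A condensed Java sketch of the before/after control flow (identifiers
match the hunk below; the result-fetching and printing detail is elided):

    // Before: each command ran inside a retry loop keyed on the exception.
    int tryCount = 0;
    boolean needRetry;
    do {
      try {
        needRetry = false;
        qp.setTryCount(tryCount);
        ret = qp.run(cmd).getResponseCode();
        // ... fetch and print results ...
      } catch (CommandNeedRetryException e) {
        console.printInfo("Retry query with a different approach...");
        tryCount++;
        needRetry = true;
      }
    } while (needRetry);

    // After: run once; failures surface through the response code.
    ret = qp.run(cmd).getResponseCode();
    // ... fetch and print results ...

With the exception class deleted, a driver can no longer signal "retry me",
so every throws clause, catch block, and import that mentioned it goes away
too; the remaining hunks are that mechanical cleanup.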


http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index a78e0c6..e57412a 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -67,9 +67,7 @@ import org.apache.hadoop.hive.conf.Validator;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
-import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.exec.tez.TezJobExecHelper;
@@ -221,101 +219,88 @@ public class CliDriver {
   }
 
   int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
-    int tryCount = 0;
-    boolean needRetry;
     boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF);
     int ret = 0;
 
-    do {
-      try {
-        needRetry = false;
-        if (proc != null) {
-          if (proc instanceof IDriver) {
-            IDriver qp = (IDriver) proc;
-            PrintStream out = ss.out;
-            long start = System.currentTimeMillis();
-            if (ss.getIsVerbose()) {
-              out.println(cmd);
-            }
+    if (proc != null) {
+      if (proc instanceof IDriver) {
+        IDriver qp = (IDriver) proc;
+        PrintStream out = ss.out;
+        long start = System.currentTimeMillis();
+        if (ss.getIsVerbose()) {
+          out.println(cmd);
+        }
 
-            qp.setTryCount(tryCount);
-            ret = qp.run(cmd).getResponseCode();
-            if (ret != 0) {
-              qp.close();
-              return ret;
-            }
+        ret = qp.run(cmd).getResponseCode();
+        if (ret != 0) {
+          qp.close();
+          return ret;
+        }
 
-            // query has run capture the time
-            long end = System.currentTimeMillis();
-            double timeTaken = (end - start) / 1000.0;
+        // query has run capture the time
+        long end = System.currentTimeMillis();
+        double timeTaken = (end - start) / 1000.0;
 
-            ArrayList<String> res = new ArrayList<String>();
+        ArrayList<String> res = new ArrayList<String>();
 
-            printHeader(qp, out);
+        printHeader(qp, out);
 
-            // print the results
-            int counter = 0;
-            try {
-              if (out instanceof FetchConverter) {
-                ((FetchConverter)out).fetchStarted();
-              }
-              while (qp.getResults(res)) {
-                for (String r : res) {
+        // print the results
+        int counter = 0;
+        try {
+          if (out instanceof FetchConverter) {
+            ((FetchConverter) out).fetchStarted();
+          }
+          while (qp.getResults(res)) {
+            for (String r : res) {
                   if (escapeCRLF) {
                     r = EscapeCRLFHelper.escapeCRLF(r);
                   }
-                  out.println(r);
-                }
-                counter += res.size();
-                res.clear();
-                if (out.checkError()) {
-                  break;
-                }
-              }
-            } catch (IOException e) {
-              console.printError("Failed with exception " + e.getClass().getName() + ":"
-                  + e.getMessage(), "\n"
-                  + org.apache.hadoop.util.StringUtils.stringifyException(e));
-              ret = 1;
+              out.println(r);
             }
-
-            int cret = qp.close();
-            if (ret == 0) {
-              ret = cret;
+            counter += res.size();
+            res.clear();
+            if (out.checkError()) {
+              break;
             }
+          }
+        } catch (IOException e) {
+          console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
+              "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+          ret = 1;
+        }
 
-            if (out instanceof FetchConverter) {
-              ((FetchConverter)out).fetchFinished();
-            }
+        int cret = qp.close();
+        if (ret == 0) {
+          ret = cret;
+        }
 
-            console.printInfo("Time taken: " + timeTaken + " seconds" +
-                (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
-          } else {
-            String firstToken = tokenizeCmd(cmd.trim())[0];
-            String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
+        if (out instanceof FetchConverter) {
+          ((FetchConverter) out).fetchFinished();
+        }
 
-            if (ss.getIsVerbose()) {
-              ss.out.println(firstToken + " " + cmd_1);
-            }
-            CommandProcessorResponse res = proc.run(cmd_1);
-            if (res.getResponseCode() != 0) {
-              ss.out.println("Query returned non-zero code: " + res.getResponseCode() +
-                  ", cause: " + res.getErrorMessage());
-            }
-            if (res.getConsoleMessages() != null) {
-              for (String consoleMsg : res.getConsoleMessages()) {
-                console.printInfo(consoleMsg);
-              }
-            }
-            ret = res.getResponseCode();
+        console.printInfo(
+            "Time taken: " + timeTaken + " seconds" + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
+      } else {
+        String firstToken = tokenizeCmd(cmd.trim())[0];
+        String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
+
+        if (ss.getIsVerbose()) {
+          ss.out.println(firstToken + " " + cmd_1);
+        }
+        CommandProcessorResponse res = proc.run(cmd_1);
+        if (res.getResponseCode() != 0) {
+          ss.out
+              .println("Query returned non-zero code: " + res.getResponseCode() + ", cause: " + res.getErrorMessage());
+        }
+        if (res.getConsoleMessages() != null) {
+          for (String consoleMsg : res.getConsoleMessages()) {
+            console.printInfo(consoleMsg);
           }
         }
-      } catch (CommandNeedRetryException e) {
-        console.printInfo("Retry query with a different approach...");
-        tryCount++;
-        needRetry = true;
+        ret = res.getResponseCode();
       }
-    } while (needRetry);
+    }
 
     return ret;
   }
@@ -398,7 +383,7 @@ public class CliDriver {
 
       // we can not use "split" function directly as ";" may be quoted
       List<String> commands = splitSemiColon(line);
-      
+
       String command = "";
       for (String oneCmd : commands) {
 
@@ -430,7 +415,7 @@ public class CliDriver {
       }
     }
   }
-  
+
   public static List<String> splitSemiColon(String line) {
     boolean insideSingleQuote = false;
     boolean insideDoubleQuote = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
----------------------------------------------------------------------
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 5bc9b69..c06ec3e 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hive.cli;
 
 
 import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
@@ -53,10 +51,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.util.Shell;
 
 
 // Cannot call class TestCliDriver since that's the name of the generated
@@ -80,7 +76,7 @@ public class TestCliDriverMethods extends TestCase {
   }
 
   // If the command has an associated schema, make sure it gets printed to use
-  public void testThatCliDriverPrintsHeaderForCommandsWithSchema() throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsHeaderForCommandsWithSchema() {
     Schema mockSchema = mock(Schema.class);
     List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
     String fieldName = "FlightOfTheConchords";
@@ -94,8 +90,7 @@ public class TestCliDriverMethods extends TestCase {
   }
 
   // If the command has no schema, make sure nothing is printed
-  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema()
-      throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() {
     Schema mockSchema = mock(Schema.class);
     when(mockSchema.getFieldSchemas()).thenReturn(null);
 
@@ -156,7 +151,7 @@ public class TestCliDriverMethods extends TestCase {
    * @throws CommandNeedRetryException
    *           won't actually be thrown
    */
-  private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandNeedRetryException {
+  private PrintStream headerPrintingTestDriver(Schema mockSchema) {
     CliDriver cliDriver = new CliDriver();
 
     // We want the driver to try to print the header...

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index ad31287..a36b0db 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -222,8 +221,9 @@ public class HCatCli {
   }
 
   private static void setConfProperties(HiveConf conf, Properties props) {
-    for (java.util.Map.Entry<Object, Object> e : props.entrySet())
+    for (java.util.Map.Entry<Object, Object> e : props.entrySet()) {
       conf.set((String) e.getKey(), (String) e.getValue());
+    }
   }
 
   private static int processLine(String line) {
@@ -307,10 +307,6 @@ public class HCatCli {
       ss.err.println("Failed with exception " + e.getClass().getName() + ":"
         + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
       ret = 1;
-    } catch (CommandNeedRetryException e) {
-      ss.err.println("Failed with exception " + e.getClass().getName() + ":"
-        + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      ret = 1;
     }
 
     int cret = driver.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
index 533f0bc..e112412 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -47,11 +46,7 @@ public class HCatDriver {
   public CommandProcessorResponse run(String command) {
 
     CommandProcessorResponse cpr = null;
-    try {
-      cpr = driver.run(command);
-    } catch (CommandNeedRetryException e) {
-      return new CommandProcessorResponse(-1, e.toString(), "");
-    }
+    cpr = driver.run(command);
 
     SessionState ss = SessionState.get();
 
@@ -153,7 +148,7 @@ public class HCatDriver {
     return driver.close();
   }
 
-  public boolean getResults(ArrayList<String> res) throws IOException, CommandNeedRetryException {
+  public boolean getResults(ArrayList<String> res) throws IOException {
     return driver.getResults(res);
   }
 

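The HCatDriver hunk above shows the caller-side payoff: the wrapper no longer
needs a try/catch that converts CommandNeedRetryException into an error
response, and getResults now declares only IOException. A minimal sketch of
what client code looks like after the change (a hypothetical snippet, assuming
the DriverFactory.newDriver(conf) entry point imported elsewhere in this diff):

    // Run a statement and check the outcome via the response object;
    // no retry-exception handling is required anymore.
    IDriver driver = DriverFactory.newDriver(conf);
    CommandProcessorResponse resp = driver.run("show tables");
    if (resp.getResponseCode() != 0) {
      System.err.println("Query failed: " + resp.getErrorMessage());
    }
    driver.close();
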
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
index 91d50df..d6386ab 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
@@ -22,11 +22,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -38,7 +35,6 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
-import org.apache.thrift.TException;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -79,7 +75,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testDescDB() throws CommandNeedRetryException, IOException {
+  public void testDescDB() throws Exception {
     hcatDriver.run("drop database mydb cascade");
     assertEquals(0, hcatDriver.run("create database mydb").getResponseCode());
     CommandProcessorResponse resp = hcatDriver.run("describe database mydb");
@@ -91,7 +87,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+  public void testCreateTblWithLowerCasePartNames() throws Exception {
     driver.run("drop table junit_sem_analysis");
     CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
     assertEquals(resp.getResponseCode(), 0);
@@ -102,7 +98,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAlterTblFFpart() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
@@ -124,13 +120,13 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testUsNonExistentDB() throws CommandNeedRetryException {
+  public void testUsNonExistentDB() throws Exception {
     CommandProcessorResponse resp = hcatDriver.run("use no_such_db");
     assertEquals(ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(), resp.getResponseCode());
   }
 
   @Test
-  public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
+  public void testDatabaseOperations() throws Exception {
 
     List<String> dbs = client.getAllDatabases();
     String testDb1 = "testdatabaseoperatons1";
@@ -158,7 +154,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testCreateTableIfNotExists() throws Exception {
 
     hcatDriver.run("drop table " + TBL_NAME);
     hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
@@ -183,7 +179,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblTouch() throws CommandNeedRetryException {
+  public void testAlterTblTouch() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -197,7 +193,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testChangeColumns() throws CommandNeedRetryException {
+  public void testChangeColumns() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
@@ -212,7 +208,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAddReplaceCols() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
@@ -234,7 +230,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblClusteredBy() throws CommandNeedRetryException {
+  public void testAlterTblClusteredBy() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -244,7 +240,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTableRename() throws CommandNeedRetryException, TException {
+  public void testAlterTableRename() throws Exception {
     hcatDriver.run("drop table oldname");
     hcatDriver.run("drop table newname");
     hcatDriver.run("create table oldname (a int)");
@@ -264,7 +260,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAlterTableSetFF() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -285,7 +281,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddPartFail() throws CommandNeedRetryException {
+  public void testAddPartFail() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -295,7 +291,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddPartPass() throws IOException, CommandNeedRetryException {
+  public void testAddPartPass() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -306,7 +302,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTAS() throws CommandNeedRetryException {
+  public void testCTAS() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) as select * from tbl2";
     CommandProcessorResponse response = hcatDriver.run(query);
@@ -316,7 +312,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testStoredAs() throws CommandNeedRetryException {
+  public void testStoredAs() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int)";
     CommandProcessorResponse response = hcatDriver.run(query);
@@ -325,7 +321,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAddDriverInfo() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
@@ -341,7 +337,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException {
+  public void testInvalidateNonStringPartition() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b int)  stored as RCFILE";
@@ -354,7 +350,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException {
+  public void testInvalidateSeqFileStoredAs() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
@@ -365,7 +361,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException {
+  public void testInvalidateTextFileStoredAs() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
@@ -376,7 +372,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException {
+  public void testInvalidateClusteredBy() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
@@ -386,7 +382,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTLFail() throws IOException, CommandNeedRetryException {
+  public void testCTLFail() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("drop table like_table");
@@ -399,7 +395,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testCTLPass() throws Exception {
 
     try {
       hcatDriver.run("drop table junit_sem_analysis");

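A pattern worth noting in this and the following test files: rather than
pruning CommandNeedRetryException out of each multi-exception throws clause,
the commit collapses the whole list to throws Exception, for example:

    // before
    public void testCTLPass() throws IOException, MetaException, TException,
        NoSuchObjectException, CommandNeedRetryException { ... }

    // after
    public void testCTLPass() throws Exception { ... }

For JUnit tests this is behavior-neutral: the framework reports any thrown
exception as a test error either way.
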
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
index 58f9086..a8aafb1 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
@@ -26,7 +26,6 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -55,7 +54,7 @@ public class TestUseDatabase extends TestCase {
   private final String dbName = "testUseDatabase_db";
   private final String tblName = "testUseDatabase_tbl";
 
-  public void testAlterTablePass() throws IOException, CommandNeedRetryException {
+  public void testAlterTablePass() throws Exception {
 
     hcatDriver.run("create database " + dbName);
     hcatDriver.run("use " + dbName);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
index 859da72..1c6ad9b 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
@@ -26,7 +26,6 @@ import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -65,8 +64,7 @@ public class HCatDataCheckUtil {
     MiniCluster.createInputFile(cluster, fileName, input);
   }
 
-  public static void createTable(IDriver driver, String tableName, String createTableArgs)
-    throws CommandNeedRetryException, IOException {
+  public static void createTable(IDriver driver, String tableName, String createTableArgs) throws IOException {
     String createTable = "create table " + tableName + createTableArgs;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -74,12 +72,11 @@ public class HCatDataCheckUtil {
     }
   }
 
-  public static void dropTable(IDriver driver, String tablename) throws IOException, CommandNeedRetryException {
+  public static void dropTable(IDriver driver, String tablename) throws IOException {
     driver.run("drop table if exists " + tablename);
   }
 
-  public static ArrayList<String> formattedRun(IDriver driver, String name, String selectCmd)
-    throws CommandNeedRetryException, IOException {
+  public static ArrayList<String> formattedRun(IDriver driver, String name, String selectCmd) throws IOException {
     driver.run(selectCmd);
     ArrayList<String> src_values = new ArrayList<String>();
     driver.getResults(src_values);
@@ -91,7 +88,7 @@ public class HCatDataCheckUtil {
   public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
     return recordsEqual(first, second, null);
   }
-  public static boolean recordsEqual(HCatRecord first, HCatRecord second, 
+  public static boolean recordsEqual(HCatRecord first, HCatRecord second,
                                      StringBuilder debugDetail) {
     return (compareRecords(first, second, debugDetail) == 0);
   }
@@ -99,12 +96,12 @@ public class HCatDataCheckUtil {
   public static int compareRecords(HCatRecord first, HCatRecord second) {
     return compareRecords(first, second, null);
   }
-  public static int compareRecords(HCatRecord first, HCatRecord second, 
+  public static int compareRecords(HCatRecord first, HCatRecord second,
                                    StringBuilder debugDetail) {
     return compareRecordContents(first.getAll(), second.getAll(), debugDetail);
   }
 
-  public static int compareRecordContents(List<Object> first, List<Object> second, 
+  public static int compareRecordContents(List<Object> first, List<Object> second,
                                           StringBuilder debugDetail) {
     int mySz = first.size();
     int urSz = second.size();
@@ -118,7 +115,7 @@ public class HCatDataCheckUtil {
             String msg = "first.get(" + i + "}='" + first.get(i) + "' second.get(" +
                     i + ")='" + second.get(i) + "' compared as " + c + "\n" +
             "Types 1st/2nd=" + DataType.findType(first.get(i)) + "/" +DataType.findType(
-                    second.get(i)) + '\n' + 
+                    second.get(i)) + '\n' +
                     "first='" + first.get(i) + "' second='" + second.get(i) + "'";
             if(first.get(i) instanceof Date) {
               msg += "\n((Date)first.get(i)).getTime()=" + ((Date)first.get(i)).getTime();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
index 818e712..4224f1e 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
@@ -33,7 +33,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
 import org.apache.hive.hcatalog.data.transfer.HCatReader;
@@ -49,7 +48,7 @@ import org.junit.Test;
 public class TestReaderWriter extends HCatBaseTest {
 
   @Test
-  public void test() throws MetaException, CommandNeedRetryException,
+  public void test() throws Exception,
       IOException, ClassNotFoundException {
 
     driver.run("drop table mytbl");
@@ -98,7 +97,7 @@ public class TestReaderWriter extends HCatBaseTest {
     }
   }
 
-  private WriterContext runsInMaster(Map<String, String> config) throws HCatException {
+  private WriterContext runsInMaster(Map<String, String> config) throws Exception {
 
     WriteEntity.Builder builder = new WriteEntity.Builder();
     WriteEntity entity = builder.withTable("mytbl").build();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
index e0fc02e..14b22ed 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
@@ -42,7 +42,6 @@ import java.util.Properties;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -92,20 +91,20 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
     this.storageFormat = getStorageFormat();
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws Exception {
     driver.run("drop table if exists " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
   static void createTable(String tablename, String schema, String partitionedBy, IDriver driver, String storageFormat)
-      throws IOException, CommandNeedRetryException {
+      throws Exception {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
@@ -117,7 +116,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
     executeStatementOnDriver(createTable, driver);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -125,7 +124,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -332,7 +331,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
   }
 
   @Test
-  public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
+  public void testReadPartitionedBasic() throws Exception {
     PigServer server = createPigServer(false);
 
     driver.run("select * from " + PARTITIONED_TABLE);
@@ -399,7 +398,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
   }
 
   @Test
-  public void testReadMissingPartitionBasicNeg() throws IOException, CommandNeedRetryException {
+  public void testReadMissingPartitionBasicNeg() throws Exception {
     PigServer server = createPigServer(false);
 
     File removedPartitionDir = new File(TEST_WAREHOUSE_DIR + "/" + PARTITIONED_TABLE + "/bkt=0");

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index 40581e6..30b0047 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -35,7 +35,6 @@ import java.util.List;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
@@ -416,7 +415,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testPartColsInData() throws IOException, CommandNeedRetryException {
+  public void testPartColsInData() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int", "b string", driver, storageFormat);
 
@@ -539,7 +538,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testNoAlias() throws IOException, CommandNeedRetryException {
+  public void testNoAlias() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_parted", driver);
     AbstractHCatLoaderTest.createTable("junit_parted","a int, b string", "ds string", driver, storageFormat);
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -583,7 +582,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+  public void testStoreMultiTables() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -634,7 +633,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoSchema() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -670,7 +669,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoCtorArgs() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -706,7 +705,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testEmptyStore() throws IOException, CommandNeedRetryException {
+  public void testEmptyStore() throws Exception {
 
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null, driver, storageFormat);
@@ -739,7 +738,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testBagNStruct() throws IOException, CommandNeedRetryException {
+  public void testBagNStruct() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted",
         "b string,a struct<a1:int>,  arr_of_struct array<string>, " +
@@ -781,7 +780,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncAllSimpleTypes() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted",
         "a int, b float, c double, d bigint, e string, h boolean, f binary, g binary", null,
@@ -840,7 +839,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncSimple() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -878,8 +877,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -912,8 +910,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -945,8 +942,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -972,7 +968,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+  public void testPartitionPublish() throws Exception {
     AbstractHCatLoaderTest.dropTable("ptn_fail", driver);
     AbstractHCatLoaderTest.createTable("ptn_fail","a int, c string", "b string",
         driver, storageFormat);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
index e449729..e0f1c89 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -108,15 +107,16 @@ public class TestE2EScenarios {
     }
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy, String storageFormat) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy, String storageFormat)
+      throws Exception {
    AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void driverRun(String cmd) throws IOException, CommandNeedRetryException {
+  private void driverRun(String cmd) throws Exception {
     int retCode = driver.run(cmd).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to run ["

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 411c165..9cb1477 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -18,7 +18,10 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assume.assumeTrue;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -31,15 +34,11 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-
-import org.apache.hadoop.util.Shell;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -51,19 +50,13 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assume.assumeTrue;
-
 @RunWith(Parameterized.class)
 public class TestHCatLoaderComplexSchema {
 
@@ -93,15 +86,15 @@ public class TestHCatLoaderComplexSchema {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -209,12 +202,13 @@ public class TestHCatLoaderComplexSchema {
     verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, false);
   }
 
-  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
-    throws IOException, CommandNeedRetryException, ExecException, FrontendException {
+  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data,
+      boolean provideSchemaToStorer) throws Exception {
     verifyWriteRead(tablename, pigSchema, tableSchema, data, data, provideSchemaToStorer);
   }
-  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, List<Tuple> result, boolean provideSchemaToStorer)
-    throws IOException, CommandNeedRetryException, ExecException, FrontendException {
+
+  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data,
+      List<Tuple> result, boolean provideSchemaToStorer) throws Exception {
     MockLoader.setData(tablename + "Input", data);
     try {
       createTable(tablename, tableSchema);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index b70a952..1560571 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -18,6 +18,24 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -27,7 +45,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
@@ -60,24 +77,6 @@ import org.junit.runners.Parameterized;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
-
 @RunWith(Parameterized.class)
 public class TestHCatLoaderEncryption {
   private static final AtomicInteger salt = new AtomicInteger(new Random().nextInt());
@@ -112,20 +111,20 @@ public class TestHCatLoaderEncryption {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws Exception {
     driver.run("drop table if exists " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
   static void createTable(String tablename, String schema, String partitionedBy, IDriver driver, String storageFormat)
-      throws IOException, CommandNeedRetryException {
+      throws Exception {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
@@ -135,7 +134,7 @@ public class TestHCatLoaderEncryption {
     executeStatementOnDriver(createTable, driver);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -143,7 +142,7 @@ public class TestHCatLoaderEncryption {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -173,7 +172,7 @@ public class TestHCatLoaderEncryption {
     String s = hiveConf.get("hdfs.minidfs.basedir");
     if(s == null || s.length() <= 0) {
       //return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
-      hiveConf.set("hdfs.minidfs.basedir", 
+      hiveConf.set("hdfs.minidfs.basedir",
         System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() +
           "_" + salt.getAndIncrement() + "/dfs/");
     }
@@ -237,12 +236,14 @@ public class TestHCatLoaderEncryption {
     }
   }
 
-  private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
+  private void associateEncryptionZoneWithPath(String path) throws Exception {
     LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     enableTestOnlyCmd(SessionState.get().getConf());
     CommandProcessor crypto = getTestCommand("crypto");
-    if (crypto == null) return;
+    if (crypto == null) {
+      return;
+    }
     checkExecutionResponse(crypto.run("CREATE_KEY --keyName key_128 --bitLength 128"));
     checkExecutionResponse(crypto.run("CREATE_ZONE --keyName key_128 --path " + path));
   }
@@ -255,7 +256,7 @@ public class TestHCatLoaderEncryption {
     assertEquals("Crypto command failed with the exit code" + rc, 0, rc);
   }
 
-  private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
+  private void removeEncryptionZone() throws Exception {
     LOG.info(this.storageFormat + ": removeEncryptionZone");
     enableTestOnlyCmd(SessionState.get().getConf());
     CommandProcessor crypto = getTestCommand("crypto");
@@ -394,7 +395,8 @@ public class TestHCatLoaderEncryption {
     }
   }
 
-  static void createTableInSpecifiedPath(String tableName, String schema, String path, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void createTableInSpecifiedPath(String tableName, String schema, String path, IDriver driver)
+      throws Exception {
     String createTableStr;
     createTableStr = "create table " + tableName + "(" + schema + ") location \'" + path + "\'";
     executeStatementOnDriver(createTableStr, driver);

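The helper changes above all reduce to one pattern: with CommandNeedRetryException gone, a statement is submitted to the driver exactly once and the CommandProcessorResponse is checked for a non-zero code. A minimal standalone sketch of that post-change helper, assuming an already-initialized IDriver (the class name and error message are illustrative, not part of the patch):

    import org.apache.hadoop.hive.ql.IDriver;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    final class DriverStatements {
      // Single submission; no retry path remains, so a failing response
      // code is surfaced to the caller immediately.
      static void execute(IDriver driver, String cmd) throws Exception {
        CommandProcessorResponse cpr = driver.run(cmd);
        if (cpr.getResponseCode() != 0) {
          throw new RuntimeException("Statement failed: " + cmd + ", response: " + cpr);
        }
      }
    }
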
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
index 51ddd90..477ea66 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
@@ -20,14 +20,12 @@ package org.apache.hive.hcatalog.pig;
 
 import static org.junit.Assume.assumeTrue;
 
-import java.io.IOException;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.junit.Test;
@@ -190,7 +188,7 @@ public class TestHCatStorer extends AbstractHCatStorerTest {
 
   @Test
   @Override
-  public void testPartColsInData() throws IOException, CommandNeedRetryException {
+  public void testPartColsInData() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testPartColsInData();
   }
@@ -211,87 +209,84 @@ public class TestHCatStorer extends AbstractHCatStorerTest {
 
   @Test
   @Override
-  public void testNoAlias() throws IOException, CommandNeedRetryException {
+  public void testNoAlias() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testNoAlias();
   }
 
   @Test
   @Override
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+  public void testStoreMultiTables() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreMultiTables();
   }
 
   @Test
   @Override
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoSchema() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreWithNoSchema();
   }
 
   @Test
   @Override
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoCtorArgs() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreWithNoCtorArgs();
   }
 
   @Test
   @Override
-  public void testEmptyStore() throws IOException, CommandNeedRetryException {
+  public void testEmptyStore() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testEmptyStore();
   }
 
   @Test
   @Override
-  public void testBagNStruct() throws IOException, CommandNeedRetryException {
+  public void testBagNStruct() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testBagNStruct();
   }
 
   @Test
   @Override
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncAllSimpleTypes() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreFuncAllSimpleTypes();
   }
 
   @Test
   @Override
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncSimple() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreFuncSimple();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsInDataPartialSpec();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsInDataNoSpec();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsNoDataInDataNoSpec();
   }
 
   @Test
   @Override
-  public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+  public void testPartitionPublish() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testPartitionPublish();
   }

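Every override in this file follows the same shape: the throws clause widens from the checked pair to plain Exception, and a JUnit assumption gates the storage format before delegating to the shared test body. A condensed sketch of the pattern (the class name is illustrative; storageFormat, DISABLED_STORAGE_FORMATS, and TestUtil.shouldSkip come from the hunks above):

    import static org.junit.Assume.assumeTrue;

    import org.junit.Test;

    public class ExampleStorerTest extends AbstractHCatStorerTest {
      @Test
      @Override
      public void testEmptyStore() throws Exception {
        // assumeTrue reports the test as skipped rather than failed for
        // any format listed in DISABLED_STORAGE_FORMATS.
        assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
        super.testEmptyStore();
      }
    }
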
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
index 3cadea4..d6b3ebc 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
@@ -30,7 +30,6 @@ import java.util.Set;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
@@ -79,15 +78,15 @@ public class TestHCatStorerMulti {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -212,7 +211,7 @@ public class TestHCatStorerMulti {
     writer.close();
   }
 
-  private void cleanup() throws IOException, CommandNeedRetryException {
+  private void cleanup() throws Exception {
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);

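cleanup() in this file now declares throws Exception as well; the visible part of its body deletes the scratch warehouse so each parameterized run starts clean. A sketch under the assumption that TEST_WAREHOUSE_DIR names a test-local directory, as in this class:

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;

    private void cleanup() throws Exception {
      File warehouse = new File(TEST_WAREHOUSE_DIR);
      if (warehouse.exists()) {
        // fullyDelete removes the whole directory tree, tolerating a
        // partially populated warehouse left by an earlier failed run.
        FileUtil.fullyDelete(warehouse);
      }
      // drop-table calls for the test tables follow here in the real test
    }
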
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
index aac2002..0ffab4a 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.UUID;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 import org.apache.hive.hcatalog.HcatTestUtils;
 
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
@@ -47,7 +45,7 @@ public class TestHCatStorerWrapper extends HCatBaseTest {
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
   @Test
-  public void testStoreExternalTableWithExternalDir() throws IOException, CommandNeedRetryException{
+  public void testStoreExternalTableWithExternalDir() throws Exception {
 
     File tmpExternalDir = new File(TEST_DATA_DIR, UUID.randomUUID().toString());
     tmpExternalDir.deleteOnExit();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
index b98e1a7..9b51524 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
@@ -18,12 +18,7 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.IOException;
-
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.io.IOConstants;
-import org.junit.Ignore;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
----------------------------------------------------------------------
diff --git a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
index 61787c9..729a5e7 100644
--- a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
+++ b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
@@ -37,7 +37,6 @@ import org.apache.activemq.broker.BrokerService;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -114,7 +113,7 @@ public class TestMsgBusConnection {
     assertEquals("testconndb", messageObject.getDB());
   }
 
-  private void runQuery(String query) throws CommandNeedRetryException {
+  private void runQuery(String query) throws Exception {
     CommandProcessorResponse cpr = driver.run(query);
     assertFalse(cpr.getMessage(), cpr.failed());
   }

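runQuery above is the test-side idiom after the change: a single driver.run, with CommandProcessorResponse.failed() and getMessage() carrying the error reporting that the retry machinery used to obscure. The same helper in standalone form, with the driver passed explicitly for clarity:

    import static org.junit.Assert.assertFalse;

    import org.apache.hadoop.hive.ql.IDriver;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    static void runQuery(IDriver driver, String query) throws Exception {
      CommandProcessorResponse cpr = driver.run(query);
      // On failure, cpr.getMessage() becomes the assertion message.
      assertFalse(cpr.getMessage(), cpr.failed());
    }
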
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 8943423..3388a34 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -55,7 +54,6 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Information about the hive end point (i.e. table or partition) to write to.
@@ -102,6 +100,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists)
     throws ConnectionError, InvalidPartition, InvalidTable, PartitionCreationFailed
     , ImpersonationFailed , InterruptedException {
@@ -110,6 +109,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, HiveConf, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists, HiveConf conf)
     throws ConnectionError, InvalidPartition, InvalidTable, PartitionCreationFailed
     , ImpersonationFailed , InterruptedException {
@@ -118,6 +118,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, HiveConf, UserGroupInformation, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists, final HiveConf conf,
                                            final UserGroupInformation authenticatedUser)
     throws ConnectionError, InvalidPartition,
@@ -232,7 +233,9 @@ public class HiveEndPoint {
 
   @Override
   public boolean equals(Object o) {
-    if (this == o) return true;
+    if (this == o) {
+      return true;
+    }
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
@@ -412,6 +415,7 @@ public class HiveEndPoint {
      * @throws ImpersonationFailed failed to run command as proxyUser
      * @throws InterruptedException
      */
+    @Override
     public TransactionBatch fetchTransactionBatch(final int numTransactions,
                                                       final RecordWriter recordWriter)
             throws StreamingException, TransactionBatchUnAvailable, ImpersonationFailed
@@ -490,22 +494,11 @@ public class HiveEndPoint {
     }
 
     private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
-      int retryCount = 1; // # of times to retry if first attempt fails
-      for (int attempt=0; attempt<=retryCount; ++attempt) {
-        try {
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("Running Hive Query: "+ sql);
-          }
-          driver.run(sql);
-          return true;
-        } catch (CommandNeedRetryException e) {
-          if (attempt==retryCount) {
-            throw new QueryFailedException(sql, e);
-          }
-          continue;
-        }
-      } // for
-      return false;
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Running Hive Query: " + sql);
+      }
+      driver.run(sql);
+      return true;
     }
 
     private static String partSpecStr(List<FieldSchema> partKeys, ArrayList<String> partVals) {
@@ -687,9 +680,10 @@ public class HiveEndPoint {
 
     private void beginNextTransactionImpl() throws TransactionError {
       state = TxnState.INACTIVE;//clear state from previous txn
-      if ( currentTxnIndex + 1 >= txnIds.size() )
+      if ( currentTxnIndex + 1 >= txnIds.size() ) {
         throw new InvalidTrasactionState("No more transactions available in" +
                 " current batch for end point : " + endPt);
+      }
       ++currentTxnIndex;
       state = TxnState.OPEN;
       lastTxnUsed = getCurrentTxnId();

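The runDDL rewrite is the heart of this patch for the streaming API: the attempt/retryCount loop existed only to catch CommandNeedRetryException, so the method collapses to a single driver.run call. Note that the boolean return is now vestigial, since this form can no longer fall through to false. The surviving shape, with LOG and the enclosing class assumed from the file above:

    // Post-change form of the DDL helper in HiveEndPoint.
    // QueryFailedException stays in the signature for compatibility with
    // callers, but nothing in this body throws it any more.
    private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Running Hive Query: " + sql);
      }
      driver.run(sql);
      return true; // always true: the retry loop that could reach "return false" is gone
    }
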
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
index b41e85f..f78be7f 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
@@ -18,11 +18,10 @@
 
 package org.apache.hive.hcatalog.streaming;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 public class QueryFailedException extends StreamingException {
   String query;
-  public QueryFailedException(String query, CommandNeedRetryException e) {
+
+  public QueryFailedException(String query, Exception e) {
     super("Query failed: " + query + ". Due to :" + e.getMessage(), e);
     this.query = query;
   }

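With the constructor widened from CommandNeedRetryException to Exception, QueryFailedException can wrap anything a statement run throws. A hypothetical call site (the wrapper method is illustrative; only the constructor shape comes from the patch):

    static void runOrWrap(IDriver driver, String sql) throws QueryFailedException {
      try {
        driver.run(sql);
      } catch (Exception e) {
        // Any failure mode is wrapped uniformly now, not just the
        // removed retry exception.
        throw new QueryFailedException(sql, e);
      }
    }
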
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 5e12614..4e92812 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -66,7 +66,6 @@ import org.apache.hadoop.hive.metastore.api.TxnState;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -538,6 +537,7 @@ public class TestStreaming {
    * @deprecated use {@link #checkDataWritten2(Path, long, long, int, String, boolean, String...)} -
    * there is little value in using InputFormat directly
    */
+  @Deprecated
   private void checkDataWritten(Path partitionPath, long minTxn, long maxTxn, int buckets, int numExpectedFiles,
                                 String... records) throws Exception {
     ValidTxnList txns = msClient.getValidTxns();
@@ -546,15 +546,21 @@ public class TestStreaming {
     Assert.assertEquals(0, dir.getOriginalFiles().size());
     List<AcidUtils.ParsedDelta> current = dir.getCurrentDirectories();
     System.out.println("Files found: ");
-    for (AcidUtils.ParsedDelta pd : current) System.out.println(pd.getPath().toString());
+    for (AcidUtils.ParsedDelta pd : current) {
+      System.out.println(pd.getPath().toString());
+    }
     Assert.assertEquals(numExpectedFiles, current.size());
 
     // find the absolute minimum transaction
     long min = Long.MAX_VALUE;
     long max = Long.MIN_VALUE;
     for (AcidUtils.ParsedDelta pd : current) {
-      if (pd.getMaxTransaction() > max) max = pd.getMaxTransaction();
-      if (pd.getMinTransaction() < min) min = pd.getMinTransaction();
+      if (pd.getMaxTransaction() > max) {
+        max = pd.getMaxTransaction();
+      }
+      if (pd.getMinTransaction() < min) {
+        min = pd.getMinTransaction();
+      }
     }
     Assert.assertEquals(minTxn, min);
     Assert.assertEquals(maxTxn, max);
@@ -593,15 +599,21 @@ public class TestStreaming {
     Assert.assertEquals(0, dir.getOriginalFiles().size());
     List<AcidUtils.ParsedDelta> current = dir.getCurrentDirectories();
     System.out.println("Files found: ");
-    for (AcidUtils.ParsedDelta pd : current) System.out.println(pd.getPath().toString());
+    for (AcidUtils.ParsedDelta pd : current) {
+      System.out.println(pd.getPath().toString());
+    }
     Assert.assertEquals(numExpectedFiles, current.size());
 
     // find the absolute minimum transaction
     long min = Long.MAX_VALUE;
     long max = Long.MIN_VALUE;
     for (AcidUtils.ParsedDelta pd : current) {
-      if (pd.getMaxTransaction() > max) max = pd.getMaxTransaction();
-      if (pd.getMinTransaction() < min) min = pd.getMinTransaction();
+      if (pd.getMaxTransaction() > max) {
+        max = pd.getMaxTransaction();
+      }
+      if (pd.getMinTransaction() < min) {
+        min = pd.getMinTransaction();
+      }
     }
     Assert.assertEquals(minTxn, min);
     Assert.assertEquals(maxTxn, max);
@@ -811,7 +823,7 @@ public class TestStreaming {
         txnBatch.heartbeat();
       }
     }
-    
+
   }
   @Test
   public void testTransactionBatchEmptyAbort() throws Exception {
@@ -978,7 +990,7 @@ public class TestStreaming {
       , txnBatch.getCurrentTransactionState());
     connection.close();
   }
-  
+
   @Test
   public void testTransactionBatchCommit_Json() throws Exception {
     HiveEndPoint endPt = new HiveEndPoint(metaStoreURI, dbName, tblName,
@@ -2024,7 +2036,7 @@ public class TestStreaming {
     }
     Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"),
       expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
-    
+
     r = msClient.showTxns();
     Assert.assertEquals("HWM didn't match", 21, r.getTxn_high_water_mark());
     ti = r.getOpen_txns();
@@ -2041,12 +2053,14 @@ public class TestStreaming {
     HashMap<Integer, ArrayList<SampleRec>> result = new HashMap<Integer, ArrayList<SampleRec>>();
 
     for (File deltaDir : new File(dbLocation + "/" + tableName).listFiles()) {
-      if(!deltaDir.getName().startsWith("delta"))
+      if(!deltaDir.getName().startsWith("delta")) {
         continue;
+      }
       File[] bucketFiles = deltaDir.listFiles();
       for (File bucketFile : bucketFiles) {
-        if(bucketFile.toString().endsWith("length"))
+        if(bucketFile.toString().endsWith("length")) {
           continue;
+        }
         Integer bucketNum = getBucketNumber(bucketFile);
         ArrayList<SampleRec>  recs = dumpBucket(new Path(bucketFile.toString()));
         result.put(bucketNum, recs);
@@ -2106,14 +2120,15 @@ public class TestStreaming {
     return new Path(tableLoc);
   }
 
-  private static Path addPartition(IDriver driver, String tableName, List<String> partVals, String[] partNames) throws QueryFailedException, CommandNeedRetryException, IOException {
+  private static Path addPartition(IDriver driver, String tableName, List<String> partVals, String[] partNames)
+      throws Exception {
     String partSpec = getPartsSpec(partNames, partVals);
     String addPart = "alter table " + tableName + " add partition ( " + partSpec  + " )";
     runDDL(driver, addPart);
     return getPartitionPath(driver, tableName, partSpec);
   }
 
-  private static Path getPartitionPath(IDriver driver, String tableName, String partSpec) throws CommandNeedRetryException, IOException {
+  private static Path getPartitionPath(IDriver driver, String tableName, String partSpec) throws Exception {
     ArrayList<String> res = queryTable(driver, "describe extended " + tableName + " PARTITION (" + partSpec + ")");
     String partInfo = res.get(res.size() - 1);
     int start = partInfo.indexOf("location:") + "location:".length();
@@ -2160,8 +2175,9 @@ public class TestStreaming {
   }
 
   private static String join(String[] values, String delimiter) {
-    if(values==null)
+    if(values==null) {
       return null;
+    }
     StringBuilder strbuf = new StringBuilder();
 
     boolean first = true;
@@ -2183,28 +2199,17 @@ public class TestStreaming {
   private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
     LOG.debug(sql);
     System.out.println(sql);
-    int retryCount = 1; // # of times to retry if first attempt fails
-    for (int attempt=0; attempt <= retryCount; ++attempt) {
-      try {
-        //LOG.debug("Running Hive Query: "+ sql);
-        CommandProcessorResponse cpr = driver.run(sql);
-        if(cpr.getResponseCode() == 0) {
-          return true;
-        }
-        LOG.error("Statement: " + sql + " failed: " + cpr);
-      } catch (CommandNeedRetryException e) {
-        if (attempt == retryCount) {
-          throw new QueryFailedException(sql, e);
-        }
-        continue;
-      }
-    } // for
+    //LOG.debug("Running Hive Query: "+ sql);
+    CommandProcessorResponse cpr = driver.run(sql);
+    if (cpr.getResponseCode() == 0) {
+      return true;
+    }
+    LOG.error("Statement: " + sql + " failed: " + cpr);
     return false;
   }
 
 
-  public static ArrayList<String> queryTable(IDriver driver, String query)
-          throws CommandNeedRetryException, IOException {
+  public static ArrayList<String> queryTable(IDriver driver, String query) throws IOException {
     CommandProcessorResponse cpr = driver.run(query);
     if(cpr.getResponseCode() != 0) {
       throw new RuntimeException(query + " failed: " + cpr);
@@ -2227,13 +2232,21 @@ public class TestStreaming {
 
     @Override
     public boolean equals(Object o) {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
 
       SampleRec that = (SampleRec) o;
 
-      if (field2 != that.field2) return false;
-      if (field1 != null ? !field1.equals(that.field1) : that.field1 != null) return false;
+      if (field2 != that.field2) {
+        return false;
+      }
+      if (field1 != null ? !field1.equals(that.field1) : that.field1 != null) {
+        return false;
+      }
       return !(field3 != null ? !field3.equals(that.field3) : that.field3 != null);
 
     }