Posted to commits@hive.apache.org by kg...@apache.org on 2018/02/07 08:52:08 UTC

[1/3] hive git commit: HIVE-18523: Fix summary row in case there are no inputs (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 2422e1808 -> f7dea1060


HIVE-18523: Fix summary row in case there are no inputs (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f942e72a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f942e72a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f942e72a

Branch: refs/heads/master
Commit: f942e72abf450f1f00f27261532a54f5f81d5170
Parents: 2422e18
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Wed Feb 7 09:35:11 2018 +0100
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Wed Feb 7 09:35:11 2018 +0100

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/GroupByOperator.java    | 25 +++++-----
 .../hadoop/hive/ql/exec/IConfigureJobConf.java  | 30 ++++++++++++
 .../apache/hadoop/hive/ql/exec/Utilities.java   |  4 +-
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      |  1 +
 .../hadoop/hive/ql/exec/mr/ExecMapRunner.java   | 40 ++++++++++++++++
 .../hadoop/hive/ql/exec/mr/ExecMapper.java      | 18 ++++---
 .../hive/ql/exec/tez/HiveSplitGenerator.java    |  1 -
 .../ql/exec/vector/VectorGroupByOperator.java   | 40 ++++++++++------
 .../hive/ql/io/CombineHiveInputFormat.java      | 10 +++-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |  9 ++++
 .../hadoop/hive/ql/io/NullRowsInputFormat.java  | 14 ++++--
 .../org/apache/hadoop/hive/ql/plan/MapWork.java | 49 ++++++++++++--------
 .../apache/hadoop/hive/ql/plan/ReduceWork.java  |  7 ---
 .../clientpositive/groupby_rollup_empty.q       |  7 +++
 .../clientpositive/groupby_rollup_empty.q.out   | 28 +++++++++++
 .../llap/groupby_rollup_empty.q.out             | 28 +++++++++++
 16 files changed, 245 insertions(+), 66 deletions(-)
----------------------------------------------------------------------
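
In outline, the fix works like this: a grouping-set GROUP BY that must emit a summary row (for example "select sum(c) from tx1 where a<0 group by rollup(b)" over zero matching rows, as in the .q file below) flags the job through the new IConfigureJobConf hook, and the input formats then inject a synthetic zero-row split so the map task, and with it the operator tree, still runs. A minimal self-contained sketch of that round-trip; plain Java maps and strings stand in for JobConf, IConfigureJobConf, and the real split generation, and none of the names below are patch code:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative model only: GroupBySketch plays GroupByOperator.configureJobConf,
// getSplits plays HiveInputFormat.getSplits; neither is the real Hive code.
public class SummaryRowFlagSketch {

  interface ConfiguresJob {
    void configure(Map<String, Boolean> conf);
  }

  static class GroupBySketch implements ConfiguresJob {
    private final boolean emitsSummaryRow;

    GroupBySketch(boolean emitsSummaryRow) {
      this.emitsSummaryRow = emitsSummaryRow;
    }

    @Override
    public void configure(Map<String, Boolean> conf) {
      // mirrors job.setBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, true)
      if (emitsSummaryRow) {
        conf.put("ENSURE_OPERATORS_EXECUTED", Boolean.TRUE);
      }
    }
  }

  static List<String> getSplits(List<String> realSplits, Map<String, Boolean> conf) {
    List<String> result = new ArrayList<>(realSplits);
    if (result.isEmpty() && conf.getOrDefault("ENSURE_OPERATORS_EXECUTED", Boolean.FALSE)) {
      // with no real inputs but the flag set, keep the task alive with a dummy split
      result.add("zero-rows-dummy-split");
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, Boolean> conf = new HashMap<>();
    new GroupBySketch(true).configure(conf);
    System.out.println(getSplits(new ArrayList<String>(), conf)); // [zero-rows-dummy-split]
  }
}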


http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
index 6a0f0de..6de979e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
@@ -36,13 +36,13 @@ import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.LlapDaemonInfo;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
-import org.apache.hadoop.hive.ql.exec.tez.TezContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.GroupByDesc.Mode;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
@@ -67,13 +67,14 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
 
 import javolution.util.FastBitSet;
 
 /**
  * GroupBy operator implementation.
  */
-public class GroupByOperator extends Operator<GroupByDesc> {
+public class GroupByOperator extends Operator<GroupByDesc> implements IConfigureJobConf {
 
   private static final long serialVersionUID = 1L;
   private static final int NUMROWSESTIMATESIZE = 1000;
@@ -1164,14 +1165,15 @@ public class GroupByOperator extends Operator<GroupByDesc> {
   }
 
   public static boolean shouldEmitSummaryRow(GroupByDesc desc) {
-    // exactly one reducer should emit the summary row
-    if (!firstReducer()) {
-      return false;
-    }
     // empty keyset is basically ()
     if (desc.getKeys().size() == 0) {
       return true;
     }
+
+    if (desc.getMode() != Mode.HASH && desc.getMode() != Mode.COMPLETE && desc.getMode() != Mode.PARTIAL1) {
+      return false;
+    }
+
     int groupingSetPosition = desc.getGroupingSetPosition();
     List<Integer> listGroupingSets = desc.getListGroupingSets();
     // groupingSets are known at map/reducer side; but have to do real processing
@@ -1185,13 +1187,12 @@ public class GroupByOperator extends Operator<GroupByDesc> {
     return false;
   }
 
-  public static boolean firstReducer() {
-    MapredContext ctx = TezContext.get();
-    if (ctx != null && ctx instanceof TezContext) {
-      TezContext tezContext = (TezContext) ctx;
-      return tezContext.getTezProcessorContext().getTaskIndex() == 0;
+  @Override
+  public void configureJobConf(JobConf job) {
+    // only needed when grouping sets are present
+    if (conf.getGroupingSetPosition() > 0 && shouldEmitSummaryRow(conf)) {
+      job.setBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, true);
     }
-    return true;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/IConfigureJobConf.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/IConfigureJobConf.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/IConfigureJobConf.java
new file mode 100644
index 0000000..dd6da67
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/IConfigureJobConf.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Enables an operator to make changes to the {@link JobConf}.
+ *
+ * Invoked only during the compilation phase.
+ */
+public interface IConfigureJobConf {
+  void configureJobConf(JobConf job);
+}
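
Any operator can now take part in JobConf setup by implementing this interface; MapWork.configureJobConf (later in this patch) finds every such operator in the plan and invokes it. A minimal example of an implementor, assuming the hive-exec classes above are on the classpath (EnsureExecutionConfigurer itself is hypothetical; the real implementors in this patch are GroupByOperator and VectorGroupByOperator, which set the flag only when a summary row is due):

import org.apache.hadoop.hive.ql.exec.IConfigureJobConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class EnsureExecutionConfigurer implements IConfigureJobConf {
  @Override
  public void configureJobConf(JobConf job) {
    // ask the engine not to skip the operator tree when there are no input splits
    job.setBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, true);
  }
}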

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 941dd58..675ca12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -52,7 +52,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -244,6 +243,7 @@ public final class Utilities {
   public static final String USE_VECTORIZED_INPUT_FILE_FORMAT = "USE_VECTORIZED_INPUT_FILE_FORMAT";
   public static final String MAPNAME = "Map ";
   public static final String REDUCENAME = "Reducer ";
+  public static final String ENSURE_OPERATORS_EXECUTED = "ENSURE_OPERATORS_EXECUTED";
 
   @Deprecated
   protected static final String DEPRECATED_MAPRED_DFSCLIENT_PARALLELISM_MAX = "mapred.dfsclient.parallelism.max";
@@ -361,7 +361,7 @@ public final class Utilities {
   }
 
   public static BaseWork getMergeWork(Configuration jconf) {
-    String currentMergePrefix = jconf.get(DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX); 
+    String currentMergePrefix = jconf.get(DagUtils.TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX);
     if (StringUtils.isEmpty(currentMergePrefix)) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 976b537..b436e80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -256,6 +256,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     //See the javadoc on HiveOutputFormatImpl and HadoopShims.prepareJobOutput()
     job.setOutputFormat(HiveOutputFormatImpl.class);
 
+    job.setMapRunnerClass(ExecMapRunner.class);
     job.setMapperClass(ExecMapper.class);
 
     job.setMapOutputKeyClass(HiveKey.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapRunner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapRunner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapRunner.java
new file mode 100644
index 0000000..d8f8c2a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapRunner.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.mr;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.MapRunner;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+public class ExecMapRunner<K1, V1, K2, V2> extends MapRunner<K1, V1, K2, V2> {
+  @Override
+  public void run(RecordReader<K1, V1> input, OutputCollector<K2, V2> output, Reporter reporter) throws IOException {
+    Mapper<K1, V1, K2, V2> mapper = getMapper();
+    if (mapper instanceof ExecMapper) {
+      ExecMapper execMapper = (ExecMapper) mapper;
+      execMapper.ensureOutputInitialize(output, reporter);
+    }
+    super.run(input, output, reporter);
+  }
+
+}
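
The reason for this MapRunner subclass: when a map task receives zero input records, MapRunner.run() never calls map(), so ExecMapper's output collector stays null and its close() cannot emit the summary row. Calling ensureOutputInitialize() before the run loop (see the ExecMapper hunk below) removes that dependency on the first row. The driver-side wiring is the single line added to ExecDriver above; roughly (WiringSketch is illustrative, not patch code):

import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapRunner;
import org.apache.hadoop.mapred.JobConf;

public class WiringSketch {
  // sketch of the MR job setup; mirrors ExecDriver.execute in this patch
  static void wire(JobConf job) {
    job.setMapRunnerClass(ExecMapRunner.class);
    job.setMapperClass(ExecMapper.class);
  }
}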

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 150382a..99b33a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -141,13 +141,7 @@ public class ExecMapper extends MapReduceBase implements Mapper {
   @Override
   public void map(Object key, Object value, OutputCollector output,
       Reporter reporter) throws IOException {
-    if (oc == null) {
-      oc = output;
-      rp = reporter;
-      OperatorUtils.setChildrenCollector(mo.getChildOperators(), output);
-      mo.setReporter(rp);
-      MapredContext.get().setReporter(reporter);
-    }
+    ensureOutputInitialize(output, reporter);
     // reset the execContext for each new row
     execContext.resetRow();
 
@@ -171,6 +165,16 @@ public class ExecMapper extends MapReduceBase implements Mapper {
     }
   }
 
+  public void ensureOutputInitialize(OutputCollector output, Reporter reporter) {
+    if (oc == null) {
+      oc = output;
+      rp = reporter;
+      OperatorUtils.setChildrenCollector(mo.getChildOperators(), output);
+      mo.setReporter(rp);
+      MapredContext.get().setReporter(reporter);
+    }
+  }
+
   @Override
   public void close() {
     // No row was processed

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
index 98f4bc0..f3aa151 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
@@ -29,7 +29,6 @@ import java.util.Set;
 
 import com.google.common.base.Preconditions;
 
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.tez.common.counters.TezCounters;

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
index 45d809a..e670409 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
@@ -35,8 +35,10 @@ import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.IConfigureJobConf;
 import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
@@ -55,6 +57,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.mapred.JobConf;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -69,7 +72,7 @@ import com.google.common.base.Preconditions;
  *
  */
 public class VectorGroupByOperator extends Operator<GroupByDesc>
-    implements VectorizationOperator, VectorizationContextRegion {
+    implements VectorizationOperator, VectorizationContextRegion, IConfigureJobConf {
 
   private static final Logger LOG = LoggerFactory.getLogger(
       VectorGroupByOperator.class.getName());
@@ -447,6 +450,20 @@ public class VectorGroupByOperator extends Operator<GroupByDesc>
       if (!aborted) {
         flush(true);
       }
+      if (!aborted && sumBatchSize == 0 && GroupByOperator.shouldEmitSummaryRow(conf)) {
+        // in case the empty grouping set is present but no output has been produced,
+        // the "summary row" still needs to be emitted
+        VectorHashKeyWrapper kw = keyWrappersBatch.getVectorHashKeyWrappers()[0];
+        kw.setNull();
+        int pos = conf.getGroupingSetPosition();
+        if (pos >= 0) {
+          long val = (1 << pos) - 1;
+          keyWrappersBatch.setLongValue(kw, pos, val);
+        }
+        VectorAggregationBufferRow groupAggregators = allocateAggregationBuffer();
+        writeSingleRow(kw, groupAggregators);
+      }
+
     }
 
     /**
@@ -777,7 +794,6 @@ public class VectorGroupByOperator extends Operator<GroupByDesc>
 
     private boolean first;
     private boolean isLastGroupBatch;
-    private boolean hasOutput;
 
     /**
      * The group vector key helper.
@@ -820,7 +836,6 @@ public class VectorGroupByOperator extends Operator<GroupByDesc>
     @Override
     public void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
         boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {
-      hasOutput = true;
       if (first) {
         // Copy the group key to output batch now.  We'll copy in the aggregates at the end of the group.
         first = false;
@@ -849,16 +864,6 @@ public class VectorGroupByOperator extends Operator<GroupByDesc>
       if (!aborted && !first && !isLastGroupBatch) {
         writeGroupRow(groupAggregators, buffer);
       }
-      if (!hasOutput && GroupByOperator.shouldEmitSummaryRow(conf)) {
-        VectorHashKeyWrapper kw = keyWrappersBatch.getVectorHashKeyWrappers()[0];
-        kw.setNull();
-        int pos = conf.getGroupingSetPosition();
-        if (pos >= 0) {
-          long val = (1 << pos) - 1;
-          keyWrappersBatch.setLongValue(kw, pos, val);
-        }
-        writeSingleRow(kw , groupAggregators);
-      }
     }
   }
 
@@ -1216,4 +1221,13 @@ public class VectorGroupByOperator extends Operator<GroupByDesc>
   public VectorDesc getVectorDesc() {
     return vectorDesc;
   }
+
+  @Override
+  public void configureJobConf(JobConf job) {
+    // only needed when grouping sets are present
+    if (conf.getGroupingSetPosition() > 0 && GroupByOperator.shouldEmitSummaryRow(conf)) {
+      job.setBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, true);
+    }
+  }
+
 }
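
One detail in the block moved into close() above: conf.getGroupingSetPosition() is the key position of the grouping-set id, and (1 << pos) - 1 sets all pos low bits, the id used here for the fully-rolled-up grouping set (); combined with kw.setNull(), the single row written is exactly the summary row. A small illustrative check (GroupingSetIdSketch is hypothetical, not patch code):

public class GroupingSetIdSketch {
  public static void main(String[] args) {
    int pos = 3; // e.g. three grouping-key columns precede the grouping-set id
    long id = (1L << pos) - 1;
    // prints 111: every grouping key marked as aggregated away
    System.out.println(Long.toBinaryString(id));
  }
}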

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index e4dfc00..b698987 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -464,9 +464,8 @@ public class CombineHiveInputFormat<K extends WritableComparable, V extends Writ
       CombineHiveInputSplit csplit = new CombineHiveInputSplit(job, is, pathToPartitionInfo);
       result.add(csplit);
     }
-
     LOG.info("number of splits " + result.size());
-    return result.toArray(new CombineHiveInputSplit[result.size()]);
+    return result.toArray(new InputSplit[result.size()]);
   }
 
   /**
@@ -578,6 +577,13 @@ public class CombineHiveInputFormat<K extends WritableComparable, V extends Writ
     // clear work from ThreadLocal after splits generated in case of thread is reused in pool.
     Utilities.clearWorkMapForConf(job);
 
+    if (result.isEmpty() && paths.length > 0 && job.getBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, false)) {
+      // If there are no inputs, the execution engine skips the operator tree.
+      // To prevent that, an opaque ZeroRows input is added here when needed.
+      result.add(
+          new HiveInputSplit(new NullRowsInputFormat.DummyInputSplit(paths[0]), ZeroRowsInputFormat.class.getName()));
+    }
+
     LOG.info("Number of all splits " + result.size());
     perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.GET_SPLITS);
     return result.toArray(new InputSplit[result.size()]);

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index c3b846c..856b026 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -209,6 +209,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     }
   }
 
+  @Override
   public void configure(JobConf job) {
     this.job = job;
   }
@@ -367,6 +368,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     return instance;
   }
 
+  @Override
   public RecordReader getRecordReader(InputSplit split, JobConf job,
       Reporter reporter) throws IOException {
     HiveInputSplit hsplit = (HiveInputSplit) split;
@@ -500,6 +502,12 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     for (InputSplit is : iss) {
       result.add(new HiveInputSplit(is, inputFormatClass.getName()));
     }
+    if (iss.length == 0 && finalDirs.length > 0 && conf.getBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, false)) {
+      // If there are no inputs, the execution engine skips the operator tree.
+      // To prevent that, an opaque ZeroRows input is added here when needed.
+      result.add(new HiveInputSplit(new NullRowsInputFormat.DummyInputSplit(finalDirs[0].toString()),
+          ZeroRowsInputFormat.class.getName()));
+    }
   }
 
   public static Path[] processPathsForMmRead(List<Path> dirs, JobConf conf,
@@ -592,6 +600,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     return dirs;
   }
 
+  @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
index 6a372a3..e632d43 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
@@ -57,7 +57,11 @@ public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritab
     }
 
     public DummyInputSplit(String path) {
-      super(new Path(path, "null"), 0, 1, (String[])null);
+      this(new Path(path, "null"));
+    }
+
+    public DummyInputSplit(Path path) {
+      super(path, 0, 1, (String[]) null);
     }
   }
 
@@ -119,7 +123,9 @@ public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritab
     @Override
     public boolean next(Object arg0, Object value) throws IOException {
       if (rbCtx != null) {
-        if (counter >= MAX_ROW) return false;
+        if (counter >= MAX_ROW) {
+          return false;
+        }
         makeNullVrb(value, MAX_ROW);
         counter = MAX_ROW;
         return true;
@@ -163,7 +169,9 @@ public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritab
   public InputSplit[] getSplits(JobConf conf, int arg1) throws IOException {
     // It's important to read the correct nulls! (in truth, the path is needed for SplitGrouper).
     String[] paths = conf.getTrimmedStrings(FileInputFormat.INPUT_DIR, (String[])null);
-    if (paths == null) throw new IOException("Cannot find path in conf");
+    if (paths == null) {
+      throw new IOException("Cannot find path in conf");
+    }
     InputSplit[] result = new InputSplit[paths.length];
     for (int i = 0; i < paths.length; ++i) {
       result[i] = new DummyInputSplit(paths[i]);

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index f2b2fc5..fa7a8a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.hive.common.StringInternUtils;
-import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
 import java.util.ArrayList;
@@ -27,14 +26,12 @@ import java.util.Arrays;
 import java.util.BitSet;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Properties;
 import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
@@ -42,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.IConfigureJobConf;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
@@ -54,9 +52,6 @@ import org.apache.hadoop.hive.ql.optimizer.physical.VectorizerReason;
 import org.apache.hadoop.hive.ql.parse.SplitSample;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.JobConf;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -184,7 +179,7 @@ public class MapWork extends BaseWork {
   public void addPathToAlias(Path path, ArrayList<String> aliases){
     pathToAliases.put(path, aliases);
   }
-  
+
   public void addPathToAlias(Path path, String newAlias){
     ArrayList<String> aliases = pathToAliases.get(path);
     if (aliases == null) {
@@ -194,11 +189,11 @@ public class MapWork extends BaseWork {
     aliases.add(newAlias.intern());
   }
 
-  
+
   public void removePathToAlias(Path path){
     pathToAliases.remove(path);
   }
-  
+
   /**
    * This is used to display and verify output of "Path -> Alias" in test framework.
    *
@@ -244,7 +239,7 @@ public class MapWork extends BaseWork {
   public void removePathToPartitionInfo(Path path) {
     pathToPartitionInfo.remove(path);
   }
-  
+
   /**
    * Derive additional attributes to be rendered by EXPLAIN.
    * TODO: this method is relied upon by custom input formats to set jobconf properties.
@@ -303,15 +298,28 @@ public class MapWork extends BaseWork {
   private static String deriveLlapIoDescString(boolean isLlapOn, boolean canWrapAny,
       boolean hasPathToPartInfo, boolean hasLlap, boolean hasNonLlap, boolean hasAcid,
       boolean hasCacheOnly) {
-    if (!isLlapOn) return null; // LLAP IO is off, don't output.
-    if (!canWrapAny && !hasCacheOnly) return "no inputs"; // Cannot use with input formats.
-    if (!hasPathToPartInfo) return "unknown"; // No information to judge.
-    int varieties = (hasAcid ? 1 : 0) + (hasLlap ? 1 : 0)
-        + (hasCacheOnly ? 1 : 0) + (hasNonLlap ? 1 : 0);
-    if (varieties > 1) return "some inputs"; // Will probably never actually happen.
-    if (hasAcid) return "may be used (ACID table)";
-    if (hasLlap) return "all inputs";
-    if (hasCacheOnly) return "all inputs (cache only)";
+    if (!isLlapOn) {
+      return null; // LLAP IO is off, don't output.
+    }
+    if (!canWrapAny && !hasCacheOnly) {
+      return "no inputs"; // Cannot use with input formats.
+    }
+    if (!hasPathToPartInfo) {
+      return "unknown"; // No information to judge.
+    }
+    int varieties = (hasAcid ? 1 : 0) + (hasLlap ? 1 : 0) + (hasCacheOnly ? 1 : 0) + (hasNonLlap ? 1 : 0);
+    if (varieties > 1) {
+      return "some inputs"; // Will probably never actually happen.
+    }
+    if (hasAcid) {
+      return "may be used (ACID table)";
+    }
+    if (hasLlap) {
+      return "all inputs";
+    }
+    if (hasCacheOnly) {
+      return "all inputs (cache only)";
+    }
     return "no inputs";
   }
 
@@ -641,6 +649,9 @@ public class MapWork extends BaseWork {
     for (FileSinkOperator fs : OperatorUtils.findOperators(mappers, FileSinkOperator.class)) {
       PlanUtils.configureJobConf(fs.getConf().getTableInfo(), job);
     }
+    for (IConfigureJobConf icjc : OperatorUtils.findOperators(mappers, IConfigureJobConf.class)) {
+      icjc.configureJobConf(job);
+    }
   }
 
   public void setDummyTableScan(boolean dummyTableScan) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
index ecfb118..ff5acbb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.plan;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
@@ -29,18 +28,12 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
-import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
-import org.apache.hadoop.hive.ql.optimizer.physical.VectorizerReason;
-import org.apache.hadoop.hive.ql.plan.BaseWork.BaseExplainVectorization;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
-import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hive.common.util.ReflectionUtil;
 
 /**
  * ReduceWork represents all the information used to run a reduce task on the cluster.

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/groupby_rollup_empty.q b/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
index 432d8c4..b64eef9 100644
--- a/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
+++ b/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
@@ -22,6 +22,10 @@ from    tx1
 where	a<0
 group by rollup (b);
 
+select '2 rows expected',sum(c) from tx1 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx1 group by rollup (a);
+
 -- non-empty table
 
 insert into tx1 values (1,1,1);
@@ -64,6 +68,9 @@ from    tx2
 where	a<0
 group by a,b,d grouping sets ((), b, a, d);
 
+select '2 rows expected',sum(c) from tx2 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx2 group by rollup (a);
 
 insert into tx2 values
 (1,2,3,1.1,'x','b'),

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out b/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
index 7359140..2756d38 100644
--- a/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
+++ b/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
@@ -63,6 +63,20 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tx1
 #### A masked pattern was here ####
 NULL	1	NULL,1
+PREHOOK: query: select '2 rows expected',sum(c) from tx1 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx1 group by rollup (a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select '2 rows expected',sum(c) from tx1 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx1 group by rollup (a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+2 rows expected	NULL
+2 rows expected	NULL
 PREHOOK: query: insert into tx1 values (1,1,1)
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -225,6 +239,20 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tx2
 #### A masked pattern was here ####
 NULL	NULL	asd	1	NULL,1
+PREHOOK: query: select '2 rows expected',sum(c) from tx2 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx2 group by rollup (a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select '2 rows expected',sum(c) from tx2 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx2 group by rollup (a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+2 rows expected	NULL
+2 rows expected	NULL
 PREHOOK: query: insert into tx2 values
 (1,2,3,1.1,'x','b'),
 (3,2,3,1.1,'y','b')

http://git-wip-us.apache.org/repos/asf/hive/blob/f942e72a/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out b/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
index d2b5745..f2cda04 100644
--- a/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
+++ b/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
@@ -63,6 +63,20 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tx1
 #### A masked pattern was here ####
 NULL	1	NULL,1
+PREHOOK: query: select '2 rows expected',sum(c) from tx1 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx1 group by rollup (a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select '2 rows expected',sum(c) from tx1 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx1 group by rollup (a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+2 rows expected	NULL
+2 rows expected	NULL
 PREHOOK: query: insert into tx1 values (1,1,1)
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -234,6 +248,20 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tx2
 #### A masked pattern was here ####
 NULL	NULL	asd	1	NULL,1
+PREHOOK: query: select '2 rows expected',sum(c) from tx2 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx2 group by rollup (a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select '2 rows expected',sum(c) from tx2 group by rollup (a)
+union all
+select '2 rows expected',sum(c) from tx2 group by rollup (a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+2 rows expected	NULL
+2 rows expected	NULL
 PREHOOK: query: insert into tx2 values
 (1,2,3,1.1,'x','b'),
 (3,2,3,1.1,'y','b')


[2/3] hive git commit: HIVE-17991: Remove CommandNeedRetryException (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Posted by kg...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
index 6ae3d92..0417750 100644
--- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
+++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -98,7 +97,7 @@ public class TestCommands {
   }
 
   @Test
-  public void testDropDatabaseCommand() throws HCatException, CommandNeedRetryException {
+  public void testDropDatabaseCommand() throws HCatException {
     String dbName = "cmd_testdb";
     int evid = 999;
     Command testCmd = new DropDatabaseCommand(dbName, evid);
@@ -130,7 +129,7 @@ public class TestCommands {
   }
 
   @Test
-  public void testDropTableCommand() throws HCatException, CommandNeedRetryException {
+  public void testDropTableCommand() throws HCatException {
     String dbName = "cmd_testdb";
     String tableName = "cmd_testtable";
     int evid = 789;
@@ -210,7 +209,7 @@ public class TestCommands {
   }
 
   @Test
-  public void testDropPartitionCommand() throws HCatException, CommandNeedRetryException, MetaException {
+  public void testDropPartitionCommand() throws HCatException, MetaException {
     String dbName = "cmd_testdb";
     String tableName = "cmd_testtable";
     int evid = 789;
@@ -302,7 +301,7 @@ public class TestCommands {
   }
 
   @Test
-  public void testDropTableCommand2() throws HCatException, CommandNeedRetryException, MetaException {
+  public void testDropTableCommand2() throws HCatException, MetaException {
     // Secondary DropTableCommand test for testing repl-drop-tables' effect on partitions inside a partitioned table
     // when there exist partitions inside the table which are older than the drop event.
     // Our goal is this : Create a table t, with repl.last.id=157, say.
@@ -373,7 +372,7 @@ public class TestCommands {
 
 
   @Test
-  public void testBasicReplEximCommands() throws IOException, CommandNeedRetryException {
+  public void testBasicReplEximCommands() throws IOException {
     // repl export, has repl.last.id and repl.scope=all in it
     // import repl dump, table has repl.last.id on it (will likely be 0)
     int evid = 111;
@@ -454,7 +453,7 @@ public class TestCommands {
   }
 
   @Test
-  public void testMetadataReplEximCommands() throws IOException, CommandNeedRetryException {
+  public void testMetadataReplEximCommands() throws IOException {
     // repl metadata export, has repl.last.id and repl.scope=metadata
     // import repl metadata dump, table metadata changed, allows override, has repl.last.id
     int evid = 222;
@@ -534,7 +533,7 @@ public class TestCommands {
 
 
   @Test
-  public void testNoopReplEximCommands() throws CommandNeedRetryException, IOException {
+  public void testNoopReplEximCommands() throws Exception {
     // repl noop export on non-existent table, has repl.noop, does not error
     // import repl noop dump, no error
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
index cde728e..63a7313 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql;
 
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.HashMap;
 import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
@@ -31,7 +30,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -143,7 +141,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
       }
   }
 
-  private void cleanup() throws CommandNeedRetryException {
+  private void cleanup() throws Exception {
       String[] srcidx = {Index1Name, Index2Name};
       for (String src : srcidx) {
         driver.run("DROP INDEX IF EXISTS " + src + " ON " + Table1Name);
@@ -159,7 +157,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
       }
   }
 
-  private void executeQuery(String query) throws CommandNeedRetryException {
+  private void executeQuery(String query) throws Exception {
     CommandProcessorResponse result =  driver.run(query);
     assertNotNull("driver.run() was expected to return result for query: " + query, result);
     assertEquals("Execution of (" + query + ") failed with exit status: "
@@ -173,7 +171,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
   }
 
   private void addPartitionAndCheck(Table table, String column,
-          String value, String location) throws CommandNeedRetryException, HiveException {
+      String value, String location) throws Exception {
     executeQuery("ALTER TABLE " + table.getTableName() +
             " ADD PARTITION (" + column + "='" + value + "')" +
             buildLocationClause(location));
@@ -197,8 +195,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
     }
   }
 
-  private void alterPartitionAndCheck(Table table, String column,
-      String value, String location) throws CommandNeedRetryException, HiveException {
+  private void alterPartitionAndCheck(Table table, String column, String value, String location) throws Exception {
     assertNotNull(location);
     executeQuery("ALTER TABLE " + table.getTableName() +
         " PARTITION (" + column + "='" + value + "')" +
@@ -219,12 +216,11 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
   }
 
   private Table createTableAndCheck(String tableName, String tableLocation)
-          throws CommandNeedRetryException, HiveException, URISyntaxException {
+      throws Exception {
     return createTableAndCheck(null, tableName, tableLocation);
   }
 
-  private Table createTableAndCheck(Table baseTable, String tableName, String tableLocation)
-          throws CommandNeedRetryException, HiveException, URISyntaxException {
+  private Table createTableAndCheck(Table baseTable, String tableName, String tableLocation) throws Exception {
     executeQuery("CREATE TABLE " + tableName + (baseTable == null ?
             " (col1 string, col2 string) PARTITIONED BY (p string) " :
             " LIKE " + baseTable.getTableName())
@@ -244,8 +240,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
     return table;
   }
 
-  private void createIndexAndCheck(Table table, String indexName, String indexLocation)
-          throws CommandNeedRetryException, HiveException, URISyntaxException {
+  private void createIndexAndCheck(Table table, String indexName, String indexLocation) throws Exception {
     executeQuery("CREATE INDEX " + indexName + " ON TABLE " + table.getTableName()
             + " (col1) AS 'COMPACT' WITH DEFERRED REBUILD "
             + buildLocationClause(indexLocation));
@@ -263,8 +258,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode extends TestCase {
     }
   }
 
-  private void createDatabaseAndCheck(String databaseName, String databaseLocation)
-          throws CommandNeedRetryException, HiveException, URISyntaxException {
+  private void createDatabaseAndCheck(String databaseName, String databaseLocation) throws Exception {
     executeQuery("CREATE DATABASE " + databaseName + buildLocationClause(databaseLocation));
     Database database = db.getDatabase(databaseName);
     assertNotNull("Database object is expected for " + databaseName , database);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index d1d2d1f..d763666 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -50,8 +50,6 @@ import org.apache.hadoop.hive.metastore.messaging.event.filters.AndFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
@@ -80,10 +78,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Map;
-
-import static junit.framework.Assert.assertFalse;
-import static junit.framework.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -587,22 +583,19 @@ public class TestReplicationScenarios {
           // getTable is invoked after fetching the table names
           injectionPathCalled = true;
           Thread t = new Thread(new Runnable() {
+            @Override
             public void run() {
-              try {
-                LOG.info("Entered new thread");
-                IDriver driver2 = DriverFactory.newDriver(hconf);
-                SessionState.start(new CliSessionState(hconf));
-                CommandProcessorResponse ret = driver2.run("ALTER TABLE " + dbName + ".ptned PARTITION (b=1) RENAME TO PARTITION (b=10)");
-                success = (ret.getException() == null);
-                assertFalse(success);
-                ret = driver2.run("ALTER TABLE " + dbName + ".ptned RENAME TO " + dbName + ".ptned_renamed");
-                success = (ret.getException() == null);
-                assertFalse(success);
-                LOG.info("Exit new thread success - {}", success);
-              } catch (CommandNeedRetryException e) {
-                LOG.info("Hit Exception {} from new thread", e.getMessage());
-                throw new RuntimeException(e);
-              }
+              LOG.info("Entered new thread");
+              IDriver driver2 = DriverFactory.newDriver(hconf);
+              SessionState.start(new CliSessionState(hconf));
+              CommandProcessorResponse ret =
+                  driver2.run("ALTER TABLE " + dbName + ".ptned PARTITION (b=1) RENAME TO PARTITION (b=10)");
+              success = (ret.getException() == null);
+              assertFalse(success);
+              ret = driver2.run("ALTER TABLE " + dbName + ".ptned RENAME TO " + dbName + ".ptned_renamed");
+              success = (ret.getException() == null);
+              assertFalse(success);
+              LOG.info("Exit new thread success - {}", success);
             }
           });
           t.start();
@@ -662,19 +655,15 @@ public class TestReplicationScenarios {
           // getTable is invoked after fetching the table names
           injectionPathCalled = true;
           Thread t = new Thread(new Runnable() {
+            @Override
             public void run() {
-              try {
-                LOG.info("Entered new thread");
-                IDriver driver2 = DriverFactory.newDriver(hconf);
-                SessionState.start(new CliSessionState(hconf));
-                CommandProcessorResponse ret = driver2.run("DROP TABLE " + dbName + ".ptned");
-                success = (ret.getException() == null);
-                assertTrue(success);
-                LOG.info("Exit new thread success - {}", success);
-              } catch (CommandNeedRetryException e) {
-                LOG.info("Hit Exception {} from new thread", e.getMessage());
-                throw new RuntimeException(e);
-              }
+              LOG.info("Entered new thread");
+              IDriver driver2 = DriverFactory.newDriver(hconf);
+              SessionState.start(new CliSessionState(hconf));
+              CommandProcessorResponse ret = driver2.run("DROP TABLE " + dbName + ".ptned");
+              success = (ret.getException() == null);
+              assertTrue(success);
+              LOG.info("Exit new thread success - {}", success);
             }
           });
           t.start();
@@ -3124,7 +3113,7 @@ public class TestReplicationScenarios {
       List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(dbName+ "_dupe" , "tbl6"));
       assertEquals(nns.size(), 1);
       nnName = nns.get(0).getNn_name();
-      
+
     } catch (TException te) {
       assertNull(te);
     }
@@ -3616,12 +3605,7 @@ public class TestReplicationScenarios {
   private String getResult(int rowNum, int colNum, boolean reuse, IDriver myDriver) throws IOException {
     if (!reuse) {
       lastResults = new ArrayList<String>();
-      try {
-        myDriver.getResults(lastResults);
-      } catch (CommandNeedRetryException e) {
-        e.printStackTrace();
-        throw new RuntimeException(e);
-      }
+      myDriver.getResults(lastResults);
     }
     // Split around the 'tab' character
     return (lastResults.get(rowNum).split("\\t"))[colNum];
@@ -3646,12 +3630,7 @@ public class TestReplicationScenarios {
 
   private List<String> getOutput(IDriver myDriver) throws IOException {
     List<String> results = new ArrayList<>();
-    try {
-      myDriver.getResults(results);
-    } catch (CommandNeedRetryException e) {
-      LOG.warn(e.getMessage(),e);
-      throw new RuntimeException(e);
-    }
+    myDriver.getResults(results);
     return results;
   }
 
@@ -3772,19 +3751,10 @@ public class TestReplicationScenarios {
 
   private static boolean run(String cmd, boolean errorOnFail, IDriver myDriver) throws RuntimeException {
     boolean success = false;
-    try {
-      CommandProcessorResponse ret = myDriver.run(cmd);
-      success = ((ret.getException() == null) && (ret.getErrorMessage() == null));
-      if (!success){
-        LOG.warn("Error {} : {} running [{}].", ret.getErrorCode(), ret.getErrorMessage(), cmd);
-      }
-    } catch (CommandNeedRetryException e) {
-      if (errorOnFail){
-        throw new RuntimeException(e);
-      } else {
-        LOG.warn(e.getMessage(),e);
-        // do nothing else
-      }
+    CommandProcessorResponse ret = myDriver.run(cmd);
+    success = ((ret.getException() == null) && (ret.getErrorMessage() == null));
+    if (!success) {
+      LOG.warn("Error {} : {} running [{}].", ret.getErrorCode(), ret.getErrorMessage(), cmd);
     }
     return success;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
index dd6fa42..33e5157 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
@@ -160,12 +159,7 @@ class WarehouseInstance implements Closeable {
   private String row0Result(int colNum, boolean reuse) throws IOException {
     if (!reuse) {
       lastResults = new ArrayList<>();
-      try {
-        driver.getResults(lastResults);
-      } catch (CommandNeedRetryException e) {
-        e.printStackTrace();
-        throw new RuntimeException(e);
-      }
+      driver.getResults(lastResults);
     }
     // Split around the 'tab' character
     return (lastResults.get(0).split("\\t"))[colNum];
@@ -265,12 +259,7 @@ class WarehouseInstance implements Closeable {
 
   List<String> getOutput() throws IOException {
     List<String> results = new ArrayList<>();
-    try {
-      driver.getResults(results);
-    } catch (CommandNeedRetryException e) {
-      logger.warn(e.getMessage(), e);
-      throw new RuntimeException(e);
-    }
+    driver.getResults(results);
     return results;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 6f2405c..bc2a34a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -37,7 +37,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
@@ -114,7 +113,7 @@ public class TestHiveAuthorizerCheckInvocation {
         "stored as orc TBLPROPERTIES ('transactional'='true')");
   }
 
-  private static void runCmd(String cmd) throws CommandNeedRetryException {
+  private static void runCmd(String cmd) throws Exception {
     CommandProcessorResponse resp = driver.run(cmd);
     assertEquals(0, resp.getResponseCode());
   }
@@ -131,8 +130,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testInputSomeColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testInputSomeColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + tableName
@@ -148,8 +146,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testInputSomeColumnsUsedView() throws HiveAuthzPluginException, HiveAccessControlException,
-  CommandNeedRetryException {
+  public void testInputSomeColumnsUsedView() throws Exception {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + viewName
@@ -165,15 +162,14 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testInputSomeColumnsUsedJoin() throws HiveAuthzPluginException, HiveAccessControlException,
-  CommandNeedRetryException {
-    
+  public void testInputSomeColumnsUsedJoin() throws Exception {
+
     reset(mockedAuthorizer);
     int status = driver.compile("select " + viewName + ".i, " + tableName + ".city from "
         + viewName + " join " + tableName + " on " + viewName + ".city = " + tableName
         + ".city where " + tableName + ".k = 'X'");
     assertEquals(0, status);
-    
+
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
     Collections.sort(inputs);
     assertEquals(inputs.size(), 2);
@@ -194,8 +190,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testInputAllColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testInputAllColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select * from " + tableName + " order by i");
@@ -210,15 +205,13 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testCreateTableWithDb() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testCreateTableWithDb() throws Exception {
     final String newTable = "ctTableWithDb";
     checkCreateViewOrTableWithDb(newTable, "create table " + dbName + "." + newTable + "(i int)");
   }
 
   @Test
-  public void testCreateViewWithDb() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testCreateViewWithDb() throws Exception {
     final String newTable = "ctViewWithDb";
     checkCreateViewOrTableWithDb(newTable, "create table " + dbName + "." + newTable + "(i int)");
   }
@@ -251,8 +244,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testInputNoColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testInputNoColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
     int status = driver.compile("describe " + tableName);
@@ -265,8 +257,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testPermFunction() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testPermFunction() throws Exception {
 
     reset(mockedAuthorizer);
     final String funcName = "testauthfunc1";
@@ -296,8 +287,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testTempFunction() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testTempFunction() throws Exception {
 
     reset(mockedAuthorizer);
     final String funcName = "testAuthFunc2";
@@ -313,8 +303,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException {
+  public void testUpdateSomeColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
     assertEquals(0, status);
@@ -333,8 +322,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testUpdateSomeColumnsUsedExprInSet() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException {
+  public void testUpdateSomeColumnsUsedExprInSet() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3");
     assertEquals(0, status);
@@ -355,8 +343,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testDelete() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException {
+  public void testDelete() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("delete from " + acidTableName + " where j = 3");
     assertEquals(0, status);
@@ -370,8 +357,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testShowTables() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException {
+  public void testShowTables() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("show tables");
     assertEquals(0, status);
@@ -384,8 +370,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testDescDatabase() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException {
+  public void testDescDatabase() throws Exception {
     reset(mockedAuthorizer);
     int status = driver.compile("describe database " + dbName);
     assertEquals(0, status);
@@ -411,8 +396,7 @@ public class TestHiveAuthorizerCheckInvocation {
   }
 
   @Test
-  public void testReplDump() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException {
+  public void testReplDump() throws Exception {
 
     resetAuthorizer();
     int status = driver.compile("repl dump " + dbName);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
index 2be86f8..8981223 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin;
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Matchers.any;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -30,7 +29,6 @@ import java.util.List;
 import org.apache.hadoop.hive.UtilsForTest;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -90,7 +88,7 @@ public class TestHiveAuthorizerShowFilters {
         return filteredResults;
       }
     }
-    
+
     @Override
     public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
         HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) {
@@ -153,46 +151,40 @@ public class TestHiveAuthorizerShowFilters {
   }
 
   @Test
-  public void testShowDatabasesAll() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException, IOException {
+  public void testShowDatabasesAll() throws Exception {
     runShowDbTest(AllDbs);
   }
 
   @Test
-  public void testShowDatabasesSelected() throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException, IOException {
+  public void testShowDatabasesSelected() throws Exception {
     setFilteredResults(HivePrivilegeObjectType.DATABASE, dbName2);
     runShowDbTest(Arrays.asList(dbName2));
   }
 
-  private void runShowDbTest(List<String> expectedDbList) throws HiveAuthzPluginException,
-      HiveAccessControlException, CommandNeedRetryException, IOException {
+  private void runShowDbTest(List<String> expectedDbList) throws Exception {
     runCmd("show databases");
     verifyAllDb();
     assertEquals("filtered result check ", expectedDbList, getSortedResults());
   }
 
   @Test
-  public void testShowTablesAll() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException, IOException {
+  public void testShowTablesAll() throws Exception {
     runShowTablesTest(AllTables);
   }
 
   @Test
-  public void testShowTablesSelected() throws HiveAuthzPluginException, HiveAccessControlException,
-      CommandNeedRetryException, IOException {
+  public void testShowTablesSelected() throws Exception {
     setFilteredResults(HivePrivilegeObjectType.TABLE_OR_VIEW, tableName2);
     runShowTablesTest(Arrays.asList(tableName2));
   }
 
-  private void runShowTablesTest(List<String> expectedTabs) throws IOException,
-      CommandNeedRetryException, HiveAuthzPluginException, HiveAccessControlException {
+  private void runShowTablesTest(List<String> expectedTabs) throws Exception {
     runCmd("show tables");
     verifyAllTables();
     assertEquals("filtered result check ", expectedTabs, getSortedResults());
   }
 
-  private List<String> getSortedResults() throws IOException, CommandNeedRetryException {
+  private List<String> getSortedResults() throws Exception {
     List<String> res = new ArrayList<String>();
     // set results to be returned
     driver.getResults(res);
@@ -262,7 +254,7 @@ public class TestHiveAuthorizerShowFilters {
     }
   }
 
-  private static void runCmd(String cmd) throws CommandNeedRetryException {
+  private static void runCmd(String cmd) throws Exception {
     CommandProcessorResponse resp = driver.run(cmd);
     assertEquals(0, resp.getResponseCode());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index 1305902..a5e6293 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -55,7 +55,6 @@ import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
@@ -1425,7 +1424,7 @@ public class TestCompactor {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2bc33bd..2d0aca0 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1313,13 +1313,7 @@ public class QTestUtil {
   }
 
   public int execute(String tname) {
-    try {
-      return drv.run(qMap.get(tname)).getResponseCode();
-    } catch (CommandNeedRetryException e) {
-      LOG.error("driver failed to run the command: " + tname + " due to the exception: ", e);
-      e.printStackTrace();
-      return -1;
-    }
+    return drv.run(qMap.get(tname)).getResponseCode();
   }
 
   public int executeClient(String tname1, String tname2) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/CommandNeedRetryException.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/CommandNeedRetryException.java b/ql/src/java/org/apache/hadoop/hive/ql/CommandNeedRetryException.java
deleted file mode 100644
index 1b60005..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/CommandNeedRetryException.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql;
-
-public class CommandNeedRetryException extends Exception {
-
-  private static final long serialVersionUID = 1L;
-
-  public CommandNeedRetryException() {
-    super();
-  }
-
-  public CommandNeedRetryException(String message) {
-    super(message);
-  }
-
-  public CommandNeedRetryException(Throwable cause) {
-    super(cause);
-  }
-
-  public CommandNeedRetryException(String message, Throwable cause) {
-    super(message, cause);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 820fbf0..dba2dbb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -100,8 +100,6 @@ public class Context {
   protected String cboInfo;
   protected boolean cboSucceeded;
   protected String cmd = "";
-  // number of previous attempts
-  protected int tryCount = 0;
   private TokenRewriteStream tokenRewriteStream;
   // Holds the qualified name to tokenRewriteStream for the views
   // referenced by the query. This is used to rewrite the view AST
@@ -178,6 +176,7 @@ public class Context {
     DestClausePrefix(String prefix) {
       this.prefix = prefix;
     }
+    @Override
     public String toString() {
       return prefix;
     }
@@ -941,14 +940,6 @@ public class Context {
     this.needLockMgr = needLockMgr;
   }
 
-  public int getTryCount() {
-    return tryCount;
-  }
-
-  public void setTryCount(int tryCount) {
-    this.tryCount = tryCount;
-  }
-
   public String getCboInfo() {
     return cboInfo;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 74595b0..2d7e459 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -158,7 +158,6 @@ public class Driver implements IDriver {
 
   // A limit on the number of threads that can be launched
   private int maxthreads;
-  private int tryCount = Integer.MAX_VALUE;
 
   private String userName;
 
@@ -577,7 +576,6 @@ public class Driver implements IDriver {
         setTriggerContext(queryId);
       }
 
-      ctx.setTryCount(getTryCount());
       ctx.setCmd(command);
       ctx.setHDFSCleanup(true);
 
@@ -1377,19 +1375,16 @@ public class Driver implements IDriver {
 
   @Override
 
-  public CommandProcessorResponse run(String command)
-      throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     return run(command, false);
   }
 
   @Override
-  public CommandProcessorResponse run()
-      throws CommandNeedRetryException {
+  public CommandProcessorResponse run() {
     return run(null, true);
   }
 
-  public CommandProcessorResponse run(String command, boolean alreadyCompiled)
-        throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command, boolean alreadyCompiled) {
 
     try {
       runInternal(command, alreadyCompiled);
@@ -1579,8 +1574,7 @@ public class Driver implements IDriver {
     return compileLock;
   }
 
-  private void runInternal(String command, boolean alreadyCompiled)
-      throws CommandNeedRetryException, CommandProcessorResponse {
+  private void runInternal(String command, boolean alreadyCompiled) throws CommandProcessorResponse {
     errorMessage = null;
     SQLState = null;
     downstreamError = null;
@@ -1794,7 +1788,7 @@ public class Driver implements IDriver {
     return new CommandProcessorResponse(ret, errorMessage, SQLState, downstreamError);
   }
 
-  private void execute() throws CommandNeedRetryException, CommandProcessorResponse {
+  private void execute() throws CommandProcessorResponse {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_EXECUTE);
 
@@ -1949,13 +1943,6 @@ public class Driver implements IDriver {
         checkInterrupted("when checking the execution result.", hookContext, perfLogger);
 
         if (exitVal != 0) {
-          if (tsk.ifRetryCmdWhenFail()) {
-            driverCxt.shutdown();
-            // in case we decided to run everything in local mode, restore the
-            // the jobtracker setting to its initial value
-            ctx.restoreOriginalTracker();
-            throw new CommandNeedRetryException();
-          }
           Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
           if (backupTask != null) {
             setErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
@@ -2057,9 +2044,6 @@ public class Driver implements IDriver {
         SessionState.get().getHiveHistory().printRowCount(queryId);
       }
       releasePlan(plan);
-    } catch (CommandNeedRetryException e) {
-      executionError = true;
-      throw e;
     } catch (CommandProcessorResponse cpr) {
       executionError = true;
       throw cpr;
@@ -2276,7 +2260,7 @@ public class Driver implements IDriver {
 
   @SuppressWarnings("unchecked")
   @Override
-  public boolean getResults(List res) throws IOException, CommandNeedRetryException {
+  public boolean getResults(List res) throws IOException {
     if (lDrvState.driverState == DriverState.DESTROYED || lDrvState.driverState == DriverState.CLOSED) {
       throw new IOException("FAILED: query has been cancelled, closed, or destroyed.");
     }
@@ -2359,15 +2343,6 @@ public class Driver implements IDriver {
     }
   }
 
-  public int getTryCount() {
-    return tryCount;
-  }
-
-  @Override
-  public void setTryCount(int tryCount) {
-    this.tryCount = tryCount;
-  }
-
   // DriverContext could be released in the query and close processes at same
   // time, which needs to be thread protected.
   private void releaseDriverContext() {

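For orientation (an illustrative note, not part of the patch): with the retry machinery removed, Driver.run() signals failure only through the returned CommandProcessorResponse; there is no longer a checked exception asking the client to re-run the command. A minimal, hedged sketch of a call site follows; the query string is hypothetical and a configured HiveConf/session is assumed:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.DriverFactory;
  import org.apache.hadoop.hive.ql.IDriver;
  import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

  IDriver driver = DriverFactory.newDriver(new HiveConf());
  // run() no longer declares CommandNeedRetryException
  CommandProcessorResponse resp = driver.run("select 1");
  if (resp.getResponseCode() != 0) {
    // failure travels in the response object; no client-side retry loop
    throw new RuntimeException(resp.getErrorMessage(), resp.getException());
  }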
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
index 6280be0..d4494cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
@@ -41,15 +41,13 @@ public interface IDriver extends CommandProcessor {
 
   void setOperationId(String guid64);
 
-  void setTryCount(int maxValue);
-
-  CommandProcessorResponse run() throws CommandNeedRetryException;
+  CommandProcessorResponse run();
   @Override
-  CommandProcessorResponse run(String command) throws CommandNeedRetryException;
+  CommandProcessorResponse run(String command);
 
 
   // create some "cover" to the result?
-  boolean getResults(List res) throws IOException, CommandNeedRetryException;
+  boolean getResults(List res) throws IOException;
 
   void setMaxRows(int maxRows);
 

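Result fetching follows the same simplification: only IOException remains on getResults(). A sketch of the usual drain loop, assuming a driver that has just run a query (this mirrors the loop CliDriver itself uses):

  List<String> rows = new ArrayList<>();
  try {
    while (driver.getResults(rows)) {   // false once the result set is exhausted
      for (String row : rows) {
        System.out.println(row);
      }
      rows.clear();
    }
  } catch (IOException e) {
    // cancelled, closed, or destroyed drivers now surface here as plain IOExceptions
    throw new RuntimeException(e);
  }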
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
index cc67b87..9a77c29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
@@ -79,7 +79,6 @@ public class QueryDisplay {
     private StageType taskType;
     private String name;
     private boolean requireLock;
-    private boolean retryIfFail;
     private String statusMessage;
 
     // required for jackson
@@ -92,7 +91,6 @@ public class QueryDisplay {
       taskType = task.getType();
       name = task.getName();
       requireLock = task.requireLock();
-      retryIfFail = task.ifRetryCmdWhenFail();
     }
     @JsonIgnore
     public synchronized String getStatus() {
@@ -147,10 +145,6 @@ public class QueryDisplay {
     public synchronized boolean isRequireLock() {
       return requireLock;
     }
-    @JsonIgnore
-    public synchronized boolean isRetryIfFail() {
-      return retryIfFail;
-    }
 
     public synchronized String getExternalHandle() {
       return externalHandle;

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index a7dace9..090a188 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -24,7 +24,6 @@ import java.util.List;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -130,7 +129,7 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
     this.maxRows = maxRows;
   }
 
-  public boolean fetch(List res) throws IOException, CommandNeedRetryException {
+  public boolean fetch(List res) throws IOException {
     sink.reset(res);
     int rowsRet = work.getLeastNumRows();
     if (rowsRet <= 0) {
@@ -145,7 +144,7 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
       while (sink.getNumRows() < rowsRet) {
         if (!fetch.pushRow()) {
           if (work.getLeastNumRows() > 0) {
-            throw new CommandNeedRetryException();
+            throw new HiveException("leastNumRows check failed");
           }
 
           // Closing the operator can sometimes yield more rows (HIVE-11892)
@@ -156,8 +155,6 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
         fetched = true;
       }
       return true;
-    } catch (CommandNeedRetryException e) {
-      throw e;
     } catch (IOException e) {
       throw e;
     } catch (Exception e) {

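Note the behaviour change above: a shortfall against FetchWork.getLeastNumRows() (set by the global-limit optimization) used to raise CommandNeedRetryException so the whole command could be re-run without the optimization; it now fails with a HiveException, which the method's "throws IOException" signature suggests reaches callers wrapped in an IOException. A hedged sketch of the caller side, with the fetchTask wiring assumed:

  List<String> res = new ArrayList<>();
  try {
    fetchTask.fetch(res);
  } catch (IOException e) {
    // includes the former leastNumRows retry case; no automatic re-run happens
    throw e;
  }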
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index 7124c89..a8d851f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -579,14 +579,6 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
     return false;
   }
 
-  public boolean ifRetryCmdWhenFail() {
-    return retryCmdWhenFail;
-  }
-
-  public void setRetryCmdWhenFail(boolean retryCmdWhenFail) {
-    this.retryCmdWhenFail = retryCmdWhenFail;
-  }
-
   public QueryPlan getQueryPlan() {
     return queryPlan;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
index e656eb0..bd0cbab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
@@ -75,7 +75,7 @@ public class GlobalLimitOptimizer extends Transform {
     // The query only qualifies when there are only one top operator
     // and there is no transformer or UDTF and no block sampling
     // is used.
-    if (ctx.getTryCount() == 0 && topOps.size() == 1
+    if (topOps.size() == 1
         && !globalLimitCtx.ifHasTransformOrUDTF() &&
         nameToSplitSample.isEmpty()) {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index f01b497..63b13c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryState;
@@ -147,8 +146,6 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
         config.setOpIdToRuntimeNumRows(aggregateStats(config.getExplainRootPath()));
       } catch (IOException e1) {
         throw new SemanticException(e1);
-      } catch (CommandNeedRetryException e) {
-        throw new SemanticException(e);
       }
       ctx.resetOpContext();
       ctx.resetStream();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 0c1c4e0..92d29e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -47,8 +47,6 @@ import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
-import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
@@ -72,7 +70,6 @@ import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.StatsWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.ql.session.LineageState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -362,14 +359,6 @@ public abstract class TaskCompiler {
     if (globalLimitCtx.isEnable() && globalLimitCtx.getLastReduceLimitDesc() != null) {
       LOG.info("set least row check for LimitDesc: " + globalLimitCtx.getGlobalLimit());
       globalLimitCtx.getLastReduceLimitDesc().setLeastRows(globalLimitCtx.getGlobalLimit());
-      List<ExecDriver> mrTasks = Utilities.getMRTasks(rootTasks);
-      for (ExecDriver tsk : mrTasks) {
-        tsk.setRetryCmdWhenFail(true);
-      }
-      List<SparkTask> sparkTasks = Utilities.getSparkTasks(rootTasks);
-      for (SparkTask sparkTask : sparkTasks) {
-        sparkTask.setRetryCmdWhenFail(true);
-      }
     }
 
     Interner<TableDesc> interner = Interners.newStrongInterner();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
index 3624d08..c753264 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 public interface CommandProcessor {
-  CommandProcessorResponse run(String command) throws CommandNeedRetryException;
+  CommandProcessorResponse run(String command);
 }

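Since the interface is now a single method with no checked exception, implementations shrink accordingly. A hypothetical processor, for illustration only:

  import org.apache.hadoop.hive.ql.processors.CommandProcessor;
  import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

  public class EchoProcessor implements CommandProcessor {
    @Override
    public CommandProcessorResponse run(String command) {
      System.out.println(command);             // trivially "process" the command
      return new CommandProcessorResponse(0);  // 0 signals success
    }
  }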
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
index 07d70ab..fad4f52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
@@ -33,7 +33,6 @@ import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -104,7 +103,7 @@ public class CompileProcessor implements CommandProcessor {
    * @return CommandProcessorResponse with 0 for success and 1 for failure
    */
   @Override
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     this.command = command;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
index 6825dd8..d1202f9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
@@ -28,7 +28,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.HadoopShims;
 
@@ -82,7 +81,7 @@ public class CryptoProcessor implements CommandProcessor {
   }
 
   @Override
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     String[] args = command.split("\\s+");
 
     if (args.length < 1) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index 2f288ce..62a1725 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -102,7 +102,7 @@ public class DfsProcessor implements CommandProcessor {
     }
   }
 
-  private String[] splitCmd(String command) throws CommandNeedRetryException {
+  private String[] splitCmd(String command) throws HiveException {
 
     ArrayList<String> paras = new ArrayList<String>();
     int cmdLng = command.length();
@@ -151,8 +151,9 @@ public class DfsProcessor implements CommandProcessor {
     }
 
     if ((int) y != 0) {
-      console.printError("Syntax error on hadoop options: dfs " + command);
-      throw new CommandNeedRetryException();
+      String message = "Syntax error on hadoop options: dfs " + command;
+      console.printError(message);
+      throw new HiveException(message);
     }
 
     return paras.toArray(new String[paras.size()]);

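As with the other processors, the error path now stays inside run(): the HiveException from splitCmd() is presumably caught there and reported as a non-zero response code rather than propagating as a retry request. A hedged usage sketch; the argument string is hypothetical:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
  import org.apache.hadoop.hive.ql.processors.DfsProcessor;

  DfsProcessor dfs = new DfsProcessor(new Configuration());
  CommandProcessorResponse resp = dfs.run("-ls /tmp");
  if (resp.getResponseCode() != 0) {
    System.err.println("dfs command failed: " + resp.getErrorMessage());
  }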
http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
index 7ec36be..91a6aba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.processors;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import java.util.Arrays;
@@ -44,7 +43,7 @@ public class ListResourceProcessor implements CommandProcessor {
   }
 
   @Override
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     String[] tokens = command.split("\\s+");
     SessionState.ResourceType t;

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
index b82bd5c..4caab91 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
@@ -32,7 +31,7 @@ public class ReloadProcessor implements CommandProcessor{
   private static final Logger LOG = LoggerFactory.getLogger(ReloadProcessor.class);
 
   @Override
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     try {
       ss.loadReloadableAuxJars();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
index 144f522..ca39ff9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -43,12 +42,12 @@ public class ResetProcessor implements CommandProcessor {
   private final static String DEFAULT_ARG = "-d";
 
   @Override
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+  public CommandProcessorResponse run(String command) {
     return run(SessionState.get(), command);
   }
 
   @VisibleForTesting
-  CommandProcessorResponse run(SessionState ss, String command) throws CommandNeedRetryException {
+  CommandProcessorResponse run(SessionState ss, String command) {
     CommandProcessorResponse authErrResp =
         CommandUtil.authorizeCommand(ss, HiveOperationType.RESET, Arrays.asList(command));
     if (authErrResp != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index 4508e59..26c6700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -300,10 +299,6 @@ public class Worker extends CompactorThread {
             (ci.partName == null ? "" : "/" + ci.partName) + " due to: " + cpr);
         }
       }
-      catch(CommandNeedRetryException cnre) {
-        throw new IOException("Could not update stats for table " + ci.getFullTableName() +
-          (ci.partName == null ? "" : "/" + ci.partName) + " due to: " + cnre.getMessage());
-      }
       finally {
         if(localSession != null) {
           localSession.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index d56002d..1e83799 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -58,7 +58,6 @@ import org.apache.hadoop.hive.llap.security.LlapTokenIdentifier;
 import org.apache.hadoop.hive.llap.security.LlapTokenLocalClient;
 import org.apache.hadoop.hive.llap.tez.Converters;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -269,13 +268,9 @@ public class GenericUDTFGetSplits extends GenericUDTF {
         String ctas = "create temporary table " + tableName + " as " + query;
         LOG.info("Materializing the query for LLAPIF; CTAS: " + ctas);
 
-        try {
-          driver.resetQueryState();
-          HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
-          cpr = driver.run(ctas, false);
-        } catch (CommandNeedRetryException e) {
-          throw new HiveException(e);
-        }
+        driver.resetQueryState();
+        HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_MODE, originalMode);
+        cpr = driver.run(ctas, false);
 
         if(cpr.getResponseCode() != 0) {
           throw new HiveException("Failed to create temp table: " + cpr.getException());

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
index 0662875..06a96d5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestQueryHooks.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.hooks;
 import com.google.common.collect.Lists;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -55,7 +54,7 @@ public class TestQueryHooks {
   }
 
   @Test
-  public void testAllQueryLifeTimeWithParseHooks() throws IllegalAccessException, ClassNotFoundException, InstantiationException, CommandNeedRetryException {
+  public void testAllQueryLifeTimeWithParseHooks() throws Exception {
     String query = "select 1";
     ArgumentMatcher<QueryLifeTimeHookContext> argMatcher = new QueryLifeTimeHookContextMatcher(query);
     QueryLifeTimeHookWithParseHooks mockHook = mock(QueryLifeTimeHookWithParseHooks.class);
@@ -71,7 +70,7 @@ public class TestQueryHooks {
   }
 
   @Test
-  public void testQueryLifeTimeWithParseHooksWithParseError() throws IllegalAccessException, ClassNotFoundException, InstantiationException, CommandNeedRetryException {
+  public void testQueryLifeTimeWithParseHooksWithParseError() throws Exception {
     String query = "invalidquery";
     ArgumentMatcher<QueryLifeTimeHookContext> argMatcher = new QueryLifeTimeHookContextMatcher(query);
     QueryLifeTimeHookWithParseHooks mockHook = mock(QueryLifeTimeHookWithParseHooks.class);
@@ -87,7 +86,7 @@ public class TestQueryHooks {
   }
 
   @Test
-  public void testQueryLifeTimeWithParseHooksWithCompileError() throws IllegalAccessException, ClassNotFoundException, InstantiationException, CommandNeedRetryException {
+  public void testQueryLifeTimeWithParseHooksWithCompileError() throws Exception {
     String query = "select * from foo";
     ArgumentMatcher<QueryLifeTimeHookContext> argMatcher = new QueryLifeTimeHookContextMatcher(query);
     QueryLifeTimeHookWithParseHooks mockHook = mock(QueryLifeTimeHookWithParseHooks.class);
@@ -103,7 +102,7 @@ public class TestQueryHooks {
   }
 
   @Test
-  public void testAllQueryLifeTimeHooks() throws IllegalAccessException, ClassNotFoundException, InstantiationException, CommandNeedRetryException {
+  public void testAllQueryLifeTimeHooks() throws Exception {
     String query = "select 1";
     ArgumentMatcher<QueryLifeTimeHookContext> argMatcher = new QueryLifeTimeHookContextMatcher(query);
     QueryLifeTimeHook mockHook = mock(QueryLifeTimeHook.class);
@@ -117,7 +116,7 @@ public class TestQueryHooks {
   }
 
   @Test
-  public void testQueryLifeTimeWithCompileError() throws IllegalAccessException, ClassNotFoundException, InstantiationException, CommandNeedRetryException {
+  public void testQueryLifeTimeWithCompileError() throws Exception {
     String query = "select * from foo";
     ArgumentMatcher<QueryLifeTimeHookContext> argMatcher = new QueryLifeTimeHookContextMatcher(query);
     QueryLifeTimeHook mockHook = mock(QueryLifeTimeHook.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
index 4da7d92..d2b9327 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
@@ -23,10 +23,9 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -38,7 +37,7 @@ import org.junit.Test;
 public class TestColumnAccess {
 
   @BeforeClass
-  public static void Setup() throws CommandNeedRetryException {
+  public static void Setup() throws Exception {
     Driver driver = createDriver();
     int ret = driver.run("create table t1(id1 int, name1 string)").getResponseCode();
     Assert.assertEquals("Checking command success", 0, ret);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 4ad821c..88edc12 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -27,7 +27,6 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -49,7 +48,7 @@ import org.junit.Test;
 public class TestReadEntityDirect {
 
   @BeforeClass
-  public static void onetimeSetup() throws CommandNeedRetryException {
+  public static void onetimeSetup() throws Exception {
     Driver driver = createDriver();
     int ret = driver.run("create table t1(i int)").getResponseCode();
     assertEquals("Checking command success", 0, ret);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/ql/src/test/org/apache/hadoop/hive/ql/processors/TestResetProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestResetProcessor.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestResetProcessor.java
index 99f3f6d..26e9083 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestResetProcessor.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestResetProcessor.java
@@ -22,7 +22,6 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Test;
@@ -35,14 +34,14 @@ import static org.mockito.Mockito.when;
 public class TestResetProcessor {
 
   @Test
-  public void testResetClosesSparkSession() throws CommandNeedRetryException {
+  public void testResetClosesSparkSession() throws Exception {
     SessionState mockSessionState = createMockSparkSessionState();
     new ResetProcessor().run(mockSessionState, "");
     verify(mockSessionState).closeSparkSession();
   }
 
   @Test
-  public void testResetExecutionEngineClosesSparkSession() throws CommandNeedRetryException {
+  public void testResetExecutionEngineClosesSparkSession() throws Exception {
     SessionState mockSessionState = createMockSparkSessionState();
     new ResetProcessor().run(mockSessionState, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname);
     verify(mockSessionState).closeSparkSession();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
----------------------------------------------------------------------
diff --git a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
index ff7476e..366198e 100644
--- a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
+++ b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
@@ -179,7 +179,6 @@ org.apache.hadoop.hive.conf.HiveConf;
            <th>End Time</th>
            <th>Elapsed Time (s)</th>
            <th>Requires Lock</th>
-           <th>Retry If Fail</th>
         </tr>
 
        <%if queryInfo.getQueryDisplay() != null && queryInfo.getQueryDisplay().getTaskDisplays() != null %>
@@ -191,7 +190,6 @@ org.apache.hadoop.hive.conf.HiveConf;
                    <td><% taskDisplay.getEndTime() == null ? "" : new Date(taskDisplay.getEndTime()) %></td>
                    <td><% taskDisplay.getElapsedTime() == null ? "" : taskDisplay.getElapsedTime()/1000 %> (s) </td>
                    <td><% taskDisplay.isRequireLock() %></td>
-                   <td><% taskDisplay.isRetryIfFail() %></td>
                </tr>
            </%for>
        </%if>

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 9af0e35..5865abe 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hive.common.metrics.common.MetricsScope;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.QueryDisplay;
@@ -88,7 +87,6 @@ import org.codehaus.jackson.map.ObjectMapper;
  * SQLOperation.
  *
  */
-@SuppressWarnings("deprecation")
 public class SQLOperation extends ExecuteStatementOperation {
   private IDriver driver = null;
   private CommandProcessorResponse response;
@@ -197,8 +195,6 @@ public class SQLOperation extends ExecuteStatementOperation {
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
       // For now, we disable the test attempts.
-      driver.setTryCount(Integer.MAX_VALUE);
-
       response = driver.compileAndRespond(statement);
       if (0 != response.getResponseCode()) {
         throw toSQLException("Error while compiling statement", response);
@@ -249,7 +245,6 @@ public class SQLOperation extends ExecuteStatementOperation {
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
       // For now, we disable the test attempts.
-      driver.setTryCount(Integer.MAX_VALUE);
       response = driver.run();
       if (0 != response.getResponseCode()) {
         throw toSQLException("Error while processing statement", response);
@@ -494,8 +489,6 @@ public class SQLOperation extends ExecuteStatementOperation {
       return rowSet;
     } catch (IOException e) {
       throw new HiveSQLException(e);
-    } catch (CommandNeedRetryException e) {
-      throw new HiveSQLException(e);
     } catch (Exception e) {
       throw new HiveSQLException(e);
     } finally {


[3/3] hive git commit: HIVE-17991: Remove CommandNeedRetryException (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Posted by kg...@apache.org.
HIVE-17991: Remove CommandNeedRetryException (Zoltan Haindrich reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f7dea106
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f7dea106
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f7dea106

Branch: refs/heads/master
Commit: f7dea1060247dddccd69112e24200ec84d2847a3
Parents: f942e72
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Wed Feb 7 09:37:57 2018 +0100
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Wed Feb 7 09:37:57 2018 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/cli/CliDriver.java   | 145 +++++++++----------
 .../hadoop/hive/cli/TestCliDriverMethods.java   |  11 +-
 .../org/apache/hive/hcatalog/cli/HCatCli.java   |   8 +-
 .../apache/hive/hcatalog/cli/HCatDriver.java    |   9 +-
 .../hive/hcatalog/cli/TestSemanticAnalysis.java |  50 +++----
 .../hive/hcatalog/cli/TestUseDatabase.java      |   3 +-
 .../hive/hcatalog/data/HCatDataCheckUtil.java   |  17 +--
 .../hive/hcatalog/data/TestReaderWriter.java    |   5 +-
 .../hcatalog/pig/AbstractHCatLoaderTest.java    |  17 +--
 .../hcatalog/pig/AbstractHCatStorerTest.java    |  30 ++--
 .../hive/hcatalog/pig/TestE2EScenarios.java     |   8 +-
 .../pig/TestHCatLoaderComplexSchema.java        |  30 ++--
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |  62 ++++----
 .../hive/hcatalog/pig/TestHCatStorer.java       |  31 ++--
 .../hive/hcatalog/pig/TestHCatStorerMulti.java  |   9 +-
 .../hcatalog/pig/TestHCatStorerWrapper.java     |   4 +-
 .../hcatalog/pig/TestParquetHCatLoader.java     |   5 -
 .../hcatalog/listener/TestMsgBusConnection.java |   3 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |  34 ++---
 .../streaming/QueryFailedException.java         |   5 +-
 .../hive/hcatalog/streaming/TestStreaming.java  |  87 ++++++-----
 .../api/repl/commands/TestCommands.java         |  15 +-
 ...estDDLWithRemoteMetastoreSecondNamenode.java |  22 +--
 .../hive/ql/parse/TestReplicationScenarios.java |  88 ++++-------
 .../hadoop/hive/ql/parse/WarehouseInstance.java |  15 +-
 .../TestHiveAuthorizerCheckInvocation.java      |  52 +++----
 .../plugin/TestHiveAuthorizerShowFilters.java   |  26 ++--
 .../hive/ql/txn/compactor/TestCompactor.java    |   3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   8 +-
 .../hive/ql/CommandNeedRetryException.java      |  40 -----
 .../java/org/apache/hadoop/hive/ql/Context.java |  11 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  37 +----
 .../java/org/apache/hadoop/hive/ql/IDriver.java |   8 +-
 .../org/apache/hadoop/hive/ql/QueryDisplay.java |   6 -
 .../apache/hadoop/hive/ql/exec/FetchTask.java   |   7 +-
 .../org/apache/hadoop/hive/ql/exec/Task.java    |   8 -
 .../hive/ql/optimizer/GlobalLimitOptimizer.java |   2 +-
 .../hive/ql/parse/ExplainSemanticAnalyzer.java  |   3 -
 .../hadoop/hive/ql/parse/TaskCompiler.java      |  11 --
 .../hive/ql/processors/CommandProcessor.java    |   4 +-
 .../hive/ql/processors/CompileProcessor.java    |   3 +-
 .../hive/ql/processors/CryptoProcessor.java     |   3 +-
 .../hadoop/hive/ql/processors/DfsProcessor.java |   9 +-
 .../ql/processors/ListResourceProcessor.java    |   3 +-
 .../hive/ql/processors/ReloadProcessor.java     |   3 +-
 .../hive/ql/processors/ResetProcessor.java      |   5 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java    |   5 -
 .../ql/udf/generic/GenericUDTFGetSplits.java    |  11 +-
 .../hadoop/hive/ql/hooks/TestQueryHooks.java    |  11 +-
 .../hadoop/hive/ql/parse/TestColumnAccess.java  |   5 +-
 .../hive/ql/plan/TestReadEntityDirect.java      |   3 +-
 .../hive/ql/processors/TestResetProcessor.java  |   5 +-
 .../org/apache/hive/tmpl/QueryProfileTmpl.jamon |   2 -
 .../service/cli/operation/SQLOperation.java     |   7 -
 54 files changed, 372 insertions(+), 642 deletions(-)
----------------------------------------------------------------------
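
For readers skimming the diffs below, the net effect of removing CommandNeedRetryException is that every caller collapses from a retry loop into a single run() call. The following is a minimal sketch of that before/after pattern, not code from this commit; SimpleDriver, Response, and the method names are hypothetical stand-ins for the real IDriver surface.

public class RetryRemovalSketch {

  interface Response { int getResponseCode(); }

  /** Stand-in for the exception type deleted by this commit. */
  static class CommandNeedRetryException extends Exception { }

  interface SimpleDriver {
    Response run(String command) throws CommandNeedRetryException;
  }

  // Before: callers such as CliDriver.processLocalCmd wrapped run() in a
  // do/while loop and retried on CommandNeedRetryException.
  static int runWithRetry(SimpleDriver driver, String cmd) {
    while (true) {
      try {
        return driver.run(cmd).getResponseCode();
      } catch (CommandNeedRetryException e) {
        // retry the query with a different approach...
      }
    }
  }

  // After: run() no longer declares the exception, so the loop and the
  // catch block disappear, which is exactly what the diffs below do.
  static int runOnce(SimpleDriver driver, String cmd) throws Exception {
    return driver.run(cmd).getResponseCode();
  }
}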


http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
----------------------------------------------------------------------
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index a78e0c6..e57412a 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -67,9 +67,7 @@ import org.apache.hadoop.hive.conf.Validator;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
-import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.exec.tez.TezJobExecHelper;
@@ -221,101 +219,88 @@ public class CliDriver {
   }
 
   int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
-    int tryCount = 0;
-    boolean needRetry;
     boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF);
     int ret = 0;
 
-    do {
-      try {
-        needRetry = false;
-        if (proc != null) {
-          if (proc instanceof IDriver) {
-            IDriver qp = (IDriver) proc;
-            PrintStream out = ss.out;
-            long start = System.currentTimeMillis();
-            if (ss.getIsVerbose()) {
-              out.println(cmd);
-            }
+    if (proc != null) {
+      if (proc instanceof IDriver) {
+        IDriver qp = (IDriver) proc;
+        PrintStream out = ss.out;
+        long start = System.currentTimeMillis();
+        if (ss.getIsVerbose()) {
+          out.println(cmd);
+        }
 
-            qp.setTryCount(tryCount);
-            ret = qp.run(cmd).getResponseCode();
-            if (ret != 0) {
-              qp.close();
-              return ret;
-            }
+        ret = qp.run(cmd).getResponseCode();
+        if (ret != 0) {
+          qp.close();
+          return ret;
+        }
 
-            // query has run capture the time
-            long end = System.currentTimeMillis();
-            double timeTaken = (end - start) / 1000.0;
+        // query has run capture the time
+        long end = System.currentTimeMillis();
+        double timeTaken = (end - start) / 1000.0;
 
-            ArrayList<String> res = new ArrayList<String>();
+        ArrayList<String> res = new ArrayList<String>();
 
-            printHeader(qp, out);
+        printHeader(qp, out);
 
-            // print the results
-            int counter = 0;
-            try {
-              if (out instanceof FetchConverter) {
-                ((FetchConverter)out).fetchStarted();
-              }
-              while (qp.getResults(res)) {
-                for (String r : res) {
+        // print the results
+        int counter = 0;
+        try {
+          if (out instanceof FetchConverter) {
+            ((FetchConverter) out).fetchStarted();
+          }
+          while (qp.getResults(res)) {
+            for (String r : res) {
                   if (escapeCRLF) {
                     r = EscapeCRLFHelper.escapeCRLF(r);
                   }
-                  out.println(r);
-                }
-                counter += res.size();
-                res.clear();
-                if (out.checkError()) {
-                  break;
-                }
-              }
-            } catch (IOException e) {
-              console.printError("Failed with exception " + e.getClass().getName() + ":"
-                  + e.getMessage(), "\n"
-                  + org.apache.hadoop.util.StringUtils.stringifyException(e));
-              ret = 1;
+              out.println(r);
             }
-
-            int cret = qp.close();
-            if (ret == 0) {
-              ret = cret;
+            counter += res.size();
+            res.clear();
+            if (out.checkError()) {
+              break;
             }
+          }
+        } catch (IOException e) {
+          console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
+              "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+          ret = 1;
+        }
 
-            if (out instanceof FetchConverter) {
-              ((FetchConverter)out).fetchFinished();
-            }
+        int cret = qp.close();
+        if (ret == 0) {
+          ret = cret;
+        }
 
-            console.printInfo("Time taken: " + timeTaken + " seconds" +
-                (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
-          } else {
-            String firstToken = tokenizeCmd(cmd.trim())[0];
-            String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
+        if (out instanceof FetchConverter) {
+          ((FetchConverter) out).fetchFinished();
+        }
 
-            if (ss.getIsVerbose()) {
-              ss.out.println(firstToken + " " + cmd_1);
-            }
-            CommandProcessorResponse res = proc.run(cmd_1);
-            if (res.getResponseCode() != 0) {
-              ss.out.println("Query returned non-zero code: " + res.getResponseCode() +
-                  ", cause: " + res.getErrorMessage());
-            }
-            if (res.getConsoleMessages() != null) {
-              for (String consoleMsg : res.getConsoleMessages()) {
-                console.printInfo(consoleMsg);
-              }
-            }
-            ret = res.getResponseCode();
+        console.printInfo(
+            "Time taken: " + timeTaken + " seconds" + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
+      } else {
+        String firstToken = tokenizeCmd(cmd.trim())[0];
+        String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
+
+        if (ss.getIsVerbose()) {
+          ss.out.println(firstToken + " " + cmd_1);
+        }
+        CommandProcessorResponse res = proc.run(cmd_1);
+        if (res.getResponseCode() != 0) {
+          ss.out
+              .println("Query returned non-zero code: " + res.getResponseCode() + ", cause: " + res.getErrorMessage());
+        }
+        if (res.getConsoleMessages() != null) {
+          for (String consoleMsg : res.getConsoleMessages()) {
+            console.printInfo(consoleMsg);
           }
         }
-      } catch (CommandNeedRetryException e) {
-        console.printInfo("Retry query with a different approach...");
-        tryCount++;
-        needRetry = true;
+        ret = res.getResponseCode();
       }
-    } while (needRetry);
+    }
 
     return ret;
   }
@@ -398,7 +383,7 @@ public class CliDriver {
 
       // we can not use "split" function directly as ";" may be quoted
       List<String> commands = splitSemiColon(line);
-      
+
       String command = "";
       for (String oneCmd : commands) {
 
@@ -430,7 +415,7 @@ public class CliDriver {
       }
     }
   }
-  
+
   public static List<String> splitSemiColon(String line) {
     boolean insideSingleQuote = false;
     boolean insideDoubleQuote = false;
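
The hunk above keeps the comment that a plain String.split(";") would break commands containing quoted semicolons. As a rough illustration of why CliDriver tracks quote state instead, here is a simplified quote-aware splitter; it is not the committed splitSemiColon implementation (for one thing, it ignores escape sequences), just a sketch of the idea.

import java.util.ArrayList;
import java.util.List;

public class SemiColonSplitSketch {
  public static List<String> split(String line) {
    List<String> parts = new ArrayList<>();
    boolean insideSingleQuote = false;
    boolean insideDoubleQuote = false;
    int start = 0;
    for (int i = 0; i < line.length(); i++) {
      char c = line.charAt(i);
      if (c == '\'' && !insideDoubleQuote) {
        insideSingleQuote = !insideSingleQuote;
      } else if (c == '"' && !insideSingleQuote) {
        insideDoubleQuote = !insideDoubleQuote;
      } else if (c == ';' && !insideSingleQuote && !insideDoubleQuote) {
        // only a ';' outside quotes ends a command
        parts.add(line.substring(start, i));
        start = i + 1;
      }
    }
    parts.add(line.substring(start));
    return parts;
  }

  public static void main(String[] args) {
    // prints [select ';',  show tables]
    System.out.println(split("select ';'; show tables"));
  }
}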

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
----------------------------------------------------------------------
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
index 5bc9b69..c06ec3e 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hive.cli;
 
 
 import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
@@ -53,10 +51,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.util.Shell;
 
 
 // Cannot call class TestCliDriver since that's the name of the generated
@@ -80,7 +76,7 @@ public class TestCliDriverMethods extends TestCase {
   }
 
   // If the command has an associated schema, make sure it gets printed to use
-  public void testThatCliDriverPrintsHeaderForCommandsWithSchema() throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsHeaderForCommandsWithSchema() {
     Schema mockSchema = mock(Schema.class);
     List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
     String fieldName = "FlightOfTheConchords";
@@ -94,8 +90,7 @@ public class TestCliDriverMethods extends TestCase {
   }
 
   // If the command has no schema, make sure nothing is printed
-  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema()
-      throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() {
     Schema mockSchema = mock(Schema.class);
     when(mockSchema.getFieldSchemas()).thenReturn(null);
 
@@ -156,7 +151,7 @@ public class TestCliDriverMethods extends TestCase {
    * @throws CommandNeedRetryException
    *           won't actually be thrown
    */
-  private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandNeedRetryException {
+  private PrintStream headerPrintingTestDriver(Schema mockSchema) {
     CliDriver cliDriver = new CliDriver();
 
     // We want the driver to try to print the header...

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index ad31287..a36b0db 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -222,8 +221,9 @@ public class HCatCli {
   }
 
   private static void setConfProperties(HiveConf conf, Properties props) {
-    for (java.util.Map.Entry<Object, Object> e : props.entrySet())
+    for (java.util.Map.Entry<Object, Object> e : props.entrySet()) {
       conf.set((String) e.getKey(), (String) e.getValue());
+    }
   }
 
   private static int processLine(String line) {
@@ -307,10 +307,6 @@ public class HCatCli {
       ss.err.println("Failed with exception " + e.getClass().getName() + ":"
         + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
       ret = 1;
-    } catch (CommandNeedRetryException e) {
-      ss.err.println("Failed with exception " + e.getClass().getName() + ":"
-        + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      ret = 1;
     }
 
     int cret = driver.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
index 533f0bc..e112412 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -47,11 +46,7 @@ public class HCatDriver {
   public CommandProcessorResponse run(String command) {
 
     CommandProcessorResponse cpr = null;
-    try {
-      cpr = driver.run(command);
-    } catch (CommandNeedRetryException e) {
-      return new CommandProcessorResponse(-1, e.toString(), "");
-    }
+    cpr = driver.run(command);
 
     SessionState ss = SessionState.get();
 
@@ -153,7 +148,7 @@ public class HCatDriver {
     return driver.close();
   }
 
-  public boolean getResults(ArrayList<String> res) throws IOException, CommandNeedRetryException {
+  public boolean getResults(ArrayList<String> res) throws IOException {
     return driver.getResults(res);
   }
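
With the try/catch gone, HCatDriver callers just inspect the CommandProcessorResponse. A hedged usage sketch, assuming an already-constructed HCatDriver (the helper class and method names here are illustrative, not part of the commit):

import java.util.ArrayList;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hive.hcatalog.cli.HCatDriver;

public class HCatDriverUsageSketch {
  // run() no longer throws CommandNeedRetryException, and getResults()
  // now declares only IOException, so the caller stays linear.
  static void runAndPrint(HCatDriver driver, String command) throws Exception {
    CommandProcessorResponse cpr = driver.run(command);
    if (cpr.getResponseCode() != 0) {
      throw new RuntimeException("command failed: " + cpr.getErrorMessage());
    }
    ArrayList<String> rows = new ArrayList<>();
    while (driver.getResults(rows)) {
      rows.forEach(System.out::println);
      rows.clear();
    }
  }
}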
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
index 91d50df..d6386ab 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
@@ -22,11 +22,8 @@ import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -38,7 +35,6 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
-import org.apache.thrift.TException;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -79,7 +75,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testDescDB() throws CommandNeedRetryException, IOException {
+  public void testDescDB() throws Exception {
     hcatDriver.run("drop database mydb cascade");
     assertEquals(0, hcatDriver.run("create database mydb").getResponseCode());
     CommandProcessorResponse resp = hcatDriver.run("describe database mydb");
@@ -91,7 +87,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+  public void testCreateTblWithLowerCasePartNames() throws Exception {
     driver.run("drop table junit_sem_analysis");
     CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
     assertEquals(resp.getResponseCode(), 0);
@@ -102,7 +98,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAlterTblFFpart() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
@@ -124,13 +120,13 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testUsNonExistentDB() throws CommandNeedRetryException {
+  public void testUsNonExistentDB() throws Exception {
     CommandProcessorResponse resp = hcatDriver.run("use no_such_db");
     assertEquals(ErrorMsg.DATABASE_NOT_EXISTS.getErrorCode(), resp.getResponseCode());
   }
 
   @Test
-  public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
+  public void testDatabaseOperations() throws Exception {
 
     List<String> dbs = client.getAllDatabases();
     String testDb1 = "testdatabaseoperatons1";
@@ -158,7 +154,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testCreateTableIfNotExists() throws Exception {
 
     hcatDriver.run("drop table " + TBL_NAME);
     hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
@@ -183,7 +179,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblTouch() throws CommandNeedRetryException {
+  public void testAlterTblTouch() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -197,7 +193,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testChangeColumns() throws CommandNeedRetryException {
+  public void testChangeColumns() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
@@ -212,7 +208,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAddReplaceCols() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
@@ -234,7 +230,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTblClusteredBy() throws CommandNeedRetryException {
+  public void testAlterTblClusteredBy() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -244,7 +240,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTableRename() throws CommandNeedRetryException, TException {
+  public void testAlterTableRename() throws Exception {
     hcatDriver.run("drop table oldname");
     hcatDriver.run("drop table newname");
     hcatDriver.run("create table oldname (a int)");
@@ -264,7 +260,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAlterTableSetFF() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -285,7 +281,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddPartFail() throws CommandNeedRetryException {
+  public void testAddPartFail() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -295,7 +291,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddPartPass() throws IOException, CommandNeedRetryException {
+  public void testAddPartPass() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
@@ -306,7 +302,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTAS() throws CommandNeedRetryException {
+  public void testCTAS() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) as select * from tbl2";
     CommandProcessorResponse response = hcatDriver.run(query);
@@ -316,7 +312,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testStoredAs() throws CommandNeedRetryException {
+  public void testStoredAs() throws Exception {
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int)";
     CommandProcessorResponse response = hcatDriver.run(query);
@@ -325,7 +321,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testAddDriverInfo() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
@@ -341,7 +337,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException {
+  public void testInvalidateNonStringPartition() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b int)  stored as RCFILE";
@@ -354,7 +350,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException {
+  public void testInvalidateSeqFileStoredAs() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
@@ -365,7 +361,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException {
+  public void testInvalidateTextFileStoredAs() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
@@ -376,7 +372,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException {
+  public void testInvalidateClusteredBy() throws Exception {
 
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
@@ -386,7 +382,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTLFail() throws IOException, CommandNeedRetryException {
+  public void testCTLFail() throws Exception {
 
     driver.run("drop table junit_sem_analysis");
     driver.run("drop table like_table");
@@ -399,7 +395,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
   }
 
   @Test
-  public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+  public void testCTLPass() throws Exception {
 
     try {
       hcatDriver.run("drop table junit_sem_analysis");
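
The same signature pattern repeats across the test diffs above and below: instead of enumerating checked exceptions that include the now-deleted CommandNeedRetryException, test methods declare a blanket throws Exception. A small JUnit 4 sketch of why that keeps signatures stable (runStatement is a hypothetical stand-in for a driver call):

import java.io.IOException;
import org.junit.Test;

public class ThrowsExceptionPatternSketch {
  // Stand-in for a call that used to declare several checked exceptions
  // (MetaException, TException, CommandNeedRetryException, ...).
  private void runStatement(String sql) throws IOException {
    if (sql.isEmpty()) {
      throw new IOException("empty statement");
    }
  }

  @Test
  public void blanketThrowsKeepsSignaturesStable() throws Exception {
    // throws Exception subsumes every checked exception, so removing
    // CommandNeedRetryException does not force another signature edit here;
    // any exception that does escape simply fails the test.
    runStatement("drop table if exists junit_sem_analysis");
  }
}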

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
index 58f9086..a8aafb1 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
@@ -26,7 +26,6 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -55,7 +54,7 @@ public class TestUseDatabase extends TestCase {
   private final String dbName = "testUseDatabase_db";
   private final String tblName = "testUseDatabase_tbl";
 
-  public void testAlterTablePass() throws IOException, CommandNeedRetryException {
+  public void testAlterTablePass() throws Exception {
 
     hcatDriver.run("create database " + dbName);
     hcatDriver.run("use " + dbName);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
index 859da72..1c6ad9b 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
@@ -26,7 +26,6 @@ import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -65,8 +64,7 @@ public class HCatDataCheckUtil {
     MiniCluster.createInputFile(cluster, fileName, input);
   }
 
-  public static void createTable(IDriver driver, String tableName, String createTableArgs)
-    throws CommandNeedRetryException, IOException {
+  public static void createTable(IDriver driver, String tableName, String createTableArgs) throws IOException {
     String createTable = "create table " + tableName + createTableArgs;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -74,12 +72,11 @@ public class HCatDataCheckUtil {
     }
   }
 
-  public static void dropTable(IDriver driver, String tablename) throws IOException, CommandNeedRetryException {
+  public static void dropTable(IDriver driver, String tablename) throws IOException {
     driver.run("drop table if exists " + tablename);
   }
 
-  public static ArrayList<String> formattedRun(IDriver driver, String name, String selectCmd)
-    throws CommandNeedRetryException, IOException {
+  public static ArrayList<String> formattedRun(IDriver driver, String name, String selectCmd) throws IOException {
     driver.run(selectCmd);
     ArrayList<String> src_values = new ArrayList<String>();
     driver.getResults(src_values);
@@ -91,7 +88,7 @@ public class HCatDataCheckUtil {
   public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
     return recordsEqual(first, second, null);
   }
-  public static boolean recordsEqual(HCatRecord first, HCatRecord second, 
+  public static boolean recordsEqual(HCatRecord first, HCatRecord second,
                                      StringBuilder debugDetail) {
     return (compareRecords(first, second, debugDetail) == 0);
   }
@@ -99,12 +96,12 @@ public class HCatDataCheckUtil {
   public static int compareRecords(HCatRecord first, HCatRecord second) {
     return compareRecords(first, second, null);
   }
-  public static int compareRecords(HCatRecord first, HCatRecord second, 
+  public static int compareRecords(HCatRecord first, HCatRecord second,
                                    StringBuilder debugDetail) {
     return compareRecordContents(first.getAll(), second.getAll(), debugDetail);
   }
 
-  public static int compareRecordContents(List<Object> first, List<Object> second, 
+  public static int compareRecordContents(List<Object> first, List<Object> second,
                                           StringBuilder debugDetail) {
     int mySz = first.size();
     int urSz = second.size();
@@ -118,7 +115,7 @@ public class HCatDataCheckUtil {
             String msg = "first.get(" + i + "}='" + first.get(i) + "' second.get(" +
                     i + ")='" + second.get(i) + "' compared as " + c + "\n" +
             "Types 1st/2nd=" + DataType.findType(first.get(i)) + "/" +DataType.findType(
-                    second.get(i)) + '\n' + 
+                    second.get(i)) + '\n' +
                     "first='" + first.get(i) + "' second='" + second.get(i) + "'";
             if(first.get(i) instanceof Date) {
               msg += "\n((Date)first.get(i)).getTime()=" + ((Date)first.get(i)).getTime();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
index 818e712..4224f1e 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestReaderWriter.java
@@ -33,7 +33,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.transfer.DataTransferFactory;
 import org.apache.hive.hcatalog.data.transfer.HCatReader;
@@ -49,7 +48,7 @@ import org.junit.Test;
 public class TestReaderWriter extends HCatBaseTest {
 
   @Test
-  public void test() throws MetaException, CommandNeedRetryException,
+  public void test() throws Exception,
       IOException, ClassNotFoundException {
 
     driver.run("drop table mytbl");
@@ -98,7 +97,7 @@ public class TestReaderWriter extends HCatBaseTest {
     }
   }
 
-  private WriterContext runsInMaster(Map<String, String> config) throws HCatException {
+  private WriterContext runsInMaster(Map<String, String> config) throws Exception {
 
     WriteEntity.Builder builder = new WriteEntity.Builder();
     WriteEntity entity = builder.withTable("mytbl").build();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
index e0fc02e..14b22ed 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
@@ -42,7 +42,6 @@ import java.util.Properties;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -92,20 +91,20 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
     this.storageFormat = getStorageFormat();
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws Exception {
     driver.run("drop table if exists " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
   static void createTable(String tablename, String schema, String partitionedBy, IDriver driver, String storageFormat)
-      throws IOException, CommandNeedRetryException {
+      throws Exception {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
@@ -117,7 +116,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
     executeStatementOnDriver(createTable, driver);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -125,7 +124,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -332,7 +331,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
   }
 
   @Test
-  public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
+  public void testReadPartitionedBasic() throws Exception {
     PigServer server = createPigServer(false);
 
     driver.run("select * from " + PARTITIONED_TABLE);
@@ -399,7 +398,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
   }
 
   @Test
-  public void testReadMissingPartitionBasicNeg() throws IOException, CommandNeedRetryException {
+  public void testReadMissingPartitionBasicNeg() throws Exception {
     PigServer server = createPigServer(false);
 
     File removedPartitionDir = new File(TEST_WAREHOUSE_DIR + "/" + PARTITIONED_TABLE + "/bkt=0");

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index 40581e6..30b0047 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -35,7 +35,6 @@ import java.util.List;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
@@ -416,7 +415,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testPartColsInData() throws IOException, CommandNeedRetryException {
+  public void testPartColsInData() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int", "b string", driver, storageFormat);
 
@@ -539,7 +538,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testNoAlias() throws IOException, CommandNeedRetryException {
+  public void testNoAlias() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_parted", driver);
     AbstractHCatLoaderTest.createTable("junit_parted","a int, b string", "ds string", driver, storageFormat);
     PigServer server = new PigServer(ExecType.LOCAL);
@@ -583,7 +582,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+  public void testStoreMultiTables() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -634,7 +633,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoSchema() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -670,7 +669,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoCtorArgs() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -706,7 +705,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testEmptyStore() throws IOException, CommandNeedRetryException {
+  public void testEmptyStore() throws Exception {
 
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null, driver, storageFormat);
@@ -739,7 +738,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testBagNStruct() throws IOException, CommandNeedRetryException {
+  public void testBagNStruct() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted",
         "b string,a struct<a1:int>,  arr_of_struct array<string>, " +
@@ -781,7 +780,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncAllSimpleTypes() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted",
         "a int, b float, c double, d bigint, e string, h boolean, f binary, g binary", null,
@@ -840,7 +839,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncSimple() throws Exception {
     AbstractHCatLoaderTest.dropTable("junit_unparted", driver);
     AbstractHCatLoaderTest.createTable("junit_unparted","a int, b string", null,
         driver, storageFormat);
@@ -878,8 +877,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -912,8 +910,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -945,8 +942,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws Exception {
     AbstractHCatLoaderTest.dropTable("employee", driver);
     AbstractHCatLoaderTest.createTable("employee",
         "emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING",
@@ -972,7 +968,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
   }
 
   @Test
-  public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+  public void testPartitionPublish() throws Exception {
     AbstractHCatLoaderTest.dropTable("ptn_fail", driver);
     AbstractHCatLoaderTest.createTable("ptn_fail","a int, c string", "b string",
         driver, storageFormat);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
index e449729..e0f1c89 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -108,15 +107,16 @@ public class TestE2EScenarios {
     }
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy, String storageFormat) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy, String storageFormat)
+      throws Exception {
    AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void driverRun(String cmd) throws IOException, CommandNeedRetryException {
+  private void driverRun(String cmd) throws Exception {
     int retCode = driver.run(cmd).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to run ["

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 411c165..9cb1477 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -18,7 +18,10 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assume.assumeTrue;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -31,15 +34,11 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-
-import org.apache.hadoop.util.Shell;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -51,19 +50,13 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assume.assumeTrue;
-
 @RunWith(Parameterized.class)
 public class TestHCatLoaderComplexSchema {
 
@@ -93,15 +86,15 @@ public class TestHCatLoaderComplexSchema {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -209,12 +202,13 @@ public class TestHCatLoaderComplexSchema {
     verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, false);
   }
 
-  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
-    throws IOException, CommandNeedRetryException, ExecException, FrontendException {
+  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data,
+      boolean provideSchemaToStorer) throws Exception {
     verifyWriteRead(tablename, pigSchema, tableSchema, data, data, provideSchemaToStorer);
   }
-  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, List<Tuple> result, boolean provideSchemaToStorer)
-    throws IOException, CommandNeedRetryException, ExecException, FrontendException {
+
+  private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data,
+      List<Tuple> result, boolean provideSchemaToStorer) throws Exception {
     MockLoader.setData(tablename + "Input", data);
     try {
       createTable(tablename, tableSchema);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index b70a952..1560571 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -18,6 +18,24 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -27,7 +45,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
@@ -60,24 +77,6 @@ import org.junit.runners.Parameterized;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
-
 @RunWith(Parameterized.class)
 public class TestHCatLoaderEncryption {
   private static final AtomicInteger salt = new AtomicInteger(new Random().nextInt());
@@ -112,20 +111,20 @@ public class TestHCatLoaderEncryption {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     dropTable(tablename, driver);
   }
 
-  static void dropTable(String tablename, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void dropTable(String tablename, IDriver driver) throws Exception {
     driver.run("drop table if exists " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
   static void createTable(String tablename, String schema, String partitionedBy, IDriver driver, String storageFormat)
-      throws IOException, CommandNeedRetryException {
+      throws Exception {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
@@ -135,7 +134,7 @@ public class TestHCatLoaderEncryption {
     executeStatementOnDriver(createTable, driver);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -143,7 +142,7 @@ public class TestHCatLoaderEncryption {
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
    */
-  static void executeStatementOnDriver(String cmd, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
     LOG.debug("Executing: " + cmd);
     CommandProcessorResponse cpr = driver.run(cmd);
     if(cpr.getResponseCode() != 0) {
@@ -173,7 +172,7 @@ public class TestHCatLoaderEncryption {
     String s = hiveConf.get("hdfs.minidfs.basedir");
     if(s == null || s.length() <= 0) {
       //return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
-      hiveConf.set("hdfs.minidfs.basedir", 
+      hiveConf.set("hdfs.minidfs.basedir",
         System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() +
           "_" + salt.getAndIncrement() + "/dfs/");
     }
@@ -237,12 +236,14 @@ public class TestHCatLoaderEncryption {
     }
   }
 
-  private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
+  private void associateEncryptionZoneWithPath(String path) throws Exception {
     LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     enableTestOnlyCmd(SessionState.get().getConf());
     CommandProcessor crypto = getTestCommand("crypto");
-    if (crypto == null) return;
+    if (crypto == null) {
+      return;
+    }
     checkExecutionResponse(crypto.run("CREATE_KEY --keyName key_128 --bitLength 128"));
     checkExecutionResponse(crypto.run("CREATE_ZONE --keyName key_128 --path " + path));
   }
@@ -255,7 +256,7 @@ public class TestHCatLoaderEncryption {
     assertEquals("Crypto command failed with the exit code" + rc, 0, rc);
   }
 
-  private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
+  private void removeEncryptionZone() throws Exception {
     LOG.info(this.storageFormat + ": removeEncryptionZone");
     enableTestOnlyCmd(SessionState.get().getConf());
     CommandProcessor crypto = getTestCommand("crypto");
@@ -394,7 +395,8 @@ public class TestHCatLoaderEncryption {
     }
   }
 
-  static void createTableInSpecifiedPath(String tableName, String schema, String path, IDriver driver) throws IOException, CommandNeedRetryException {
+  static void createTableInSpecifiedPath(String tableName, String schema, String path, IDriver driver)
+      throws Exception {
     String createTableStr;
     createTableStr = "create table " + tableName + "(" + schema + ") location \'" + path + "\'";
     executeStatementOnDriver(createTableStr, driver);
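
Note on the pattern above: with CommandNeedRetryException removed from the IDriver API, the checked "throws IOException, CommandNeedRetryException" pair on each test helper collapses to a plain "throws Exception", and failures are detected from the CommandProcessorResponse rather than caught as a retry signal. A minimal sketch of the resulting helper shape, assuming a live IDriver (the RuntimeException wrapping is illustrative, not copied verbatim from this commit):

    import org.apache.hadoop.hive.ql.IDriver;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
      // driver.run() no longer throws CommandNeedRetryException; inspect the response instead
      CommandProcessorResponse cpr = driver.run(cmd);
      if (cpr.getResponseCode() != 0) {
        // non-zero response code means the statement failed; surface statement and response together
        throw new RuntimeException("Failed to execute: " + cmd + " -> " + cpr);
      }
    }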

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
index 51ddd90..477ea66 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
@@ -20,14 +20,12 @@ package org.apache.hive.hcatalog.pig;
 
 import static org.junit.Assume.assumeTrue;
 
-import java.io.IOException;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.junit.Test;
@@ -190,7 +188,7 @@ public class TestHCatStorer extends AbstractHCatStorerTest {
 
   @Test
   @Override
-  public void testPartColsInData() throws IOException, CommandNeedRetryException {
+  public void testPartColsInData() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testPartColsInData();
   }
@@ -211,87 +209,84 @@ public class TestHCatStorer extends AbstractHCatStorerTest {
 
   @Test
   @Override
-  public void testNoAlias() throws IOException, CommandNeedRetryException {
+  public void testNoAlias() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testNoAlias();
   }
 
   @Test
   @Override
-  public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+  public void testStoreMultiTables() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreMultiTables();
   }
 
   @Test
   @Override
-  public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoSchema() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreWithNoSchema();
   }
 
   @Test
   @Override
-  public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+  public void testStoreWithNoCtorArgs() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreWithNoCtorArgs();
   }
 
   @Test
   @Override
-  public void testEmptyStore() throws IOException, CommandNeedRetryException {
+  public void testEmptyStore() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testEmptyStore();
   }
 
   @Test
   @Override
-  public void testBagNStruct() throws IOException, CommandNeedRetryException {
+  public void testBagNStruct() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testBagNStruct();
   }
 
   @Test
   @Override
-  public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncAllSimpleTypes() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreFuncAllSimpleTypes();
   }
 
   @Test
   @Override
-  public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+  public void testStoreFuncSimple() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testStoreFuncSimple();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsInDataPartialSpec();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsInDataNoSpec();
   }
 
   @Test
   @Override
-  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException,
-      CommandNeedRetryException {
+  public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testDynamicPartitioningMultiPartColsNoDataInDataNoSpec();
   }
 
   @Test
   @Override
-  public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+  public void testPartitionPublish() throws Exception {
     assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     super.testPartitionPublish();
   }
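
All of these overrides must widen to "throws Exception" rather than keep the old pair, and not only for tidiness: Java allows an overriding method to declare fewer or narrower checked exceptions than the method it overrides, but each test here delegates to a super method that now declares "throws Exception", so a narrower clause would leave that call uncompilable. Illustrative, not from this commit:

    @Test
    @Override
    public void testNoAlias() throws IOException {   // narrower override is legal in itself...
      assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
      super.testNoAlias();   // ...but does not compile: unhandled exception type Exception
    }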

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
index 3cadea4..d6b3ebc 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
@@ -30,7 +30,6 @@ import java.util.Set;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
@@ -79,15 +78,15 @@ public class TestHCatStorerMulti {
     this.storageFormat = storageFormat;
   }
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+  private void dropTable(String tablename) throws Exception {
     driver.run("drop table " + tablename);
   }
 
-  private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema, String partitionedBy) throws Exception {
     AbstractHCatLoaderTest.createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
-  private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
+  private void createTable(String tablename, String schema) throws Exception {
     createTable(tablename, schema, null);
   }
 
@@ -212,7 +211,7 @@ public class TestHCatStorerMulti {
     writer.close();
   }
 
-  private void cleanup() throws IOException, CommandNeedRetryException {
+  private void cleanup() throws Exception {
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
index aac2002..0ffab4a 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.UUID;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 import org.apache.hive.hcatalog.HcatTestUtils;
 
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
@@ -47,7 +45,7 @@ public class TestHCatStorerWrapper extends HCatBaseTest {
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
   @Test
-  public void testStoreExternalTableWithExternalDir() throws IOException, CommandNeedRetryException{
+  public void testStoreExternalTableWithExternalDir() throws Exception {
 
     File tmpExternalDir = new File(TEST_DATA_DIR, UUID.randomUUID().toString());
     tmpExternalDir.deleteOnExit();

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
index b98e1a7..9b51524 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestParquetHCatLoader.java
@@ -18,12 +18,7 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import java.io.IOException;
-
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.io.IOConstants;
-import org.junit.Ignore;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
----------------------------------------------------------------------
diff --git a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
index 61787c9..729a5e7 100644
--- a/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
+++ b/hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java
@@ -37,7 +37,6 @@ import org.apache.activemq.broker.BrokerService;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -114,7 +113,7 @@ public class TestMsgBusConnection {
     assertEquals("testconndb", messageObject.getDB());
   }
 
-  private void runQuery(String query) throws CommandNeedRetryException {
+  private void runQuery(String query) throws Exception {
     CommandProcessorResponse cpr = driver.run(query);
     assertFalse(cpr.getMessage(), cpr.failed());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 8943423..3388a34 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -55,7 +54,6 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Information about the hive end point (i.e. table or partition) to write to.
@@ -102,6 +100,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists)
     throws ConnectionError, InvalidPartition, InvalidTable, PartitionCreationFailed
     , ImpersonationFailed , InterruptedException {
@@ -110,6 +109,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, HiveConf, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists, HiveConf conf)
     throws ConnectionError, InvalidPartition, InvalidTable, PartitionCreationFailed
     , ImpersonationFailed , InterruptedException {
@@ -118,6 +118,7 @@ public class HiveEndPoint {
   /**
    * @deprecated As of release 1.3/2.1.  Replaced by {@link #newConnection(boolean, HiveConf, UserGroupInformation, String)}
    */
+  @Deprecated
   public StreamingConnection newConnection(final boolean createPartIfNotExists, final HiveConf conf,
                                            final UserGroupInformation authenticatedUser)
     throws ConnectionError, InvalidPartition,
@@ -232,7 +233,9 @@ public class HiveEndPoint {
 
   @Override
   public boolean equals(Object o) {
-    if (this == o) return true;
+    if (this == o) {
+      return true;
+    }
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
@@ -412,6 +415,7 @@ public class HiveEndPoint {
      * @throws ImpersonationFailed failed to run command as proxyUser
      * @throws InterruptedException
      */
+    @Override
     public TransactionBatch fetchTransactionBatch(final int numTransactions,
                                                       final RecordWriter recordWriter)
             throws StreamingException, TransactionBatchUnAvailable, ImpersonationFailed
@@ -490,22 +494,11 @@ public class HiveEndPoint {
     }
 
     private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
-      int retryCount = 1; // # of times to retry if first attempt fails
-      for (int attempt=0; attempt<=retryCount; ++attempt) {
-        try {
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("Running Hive Query: "+ sql);
-          }
-          driver.run(sql);
-          return true;
-        } catch (CommandNeedRetryException e) {
-          if (attempt==retryCount) {
-            throw new QueryFailedException(sql, e);
-          }
-          continue;
-        }
-      } // for
-      return false;
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Running Hive Query: " + sql);
+      }
+      driver.run(sql);
+      return true;
     }
 
     private static String partSpecStr(List<FieldSchema> partKeys, ArrayList<String> partVals) {
@@ -687,9 +680,10 @@ public class HiveEndPoint {
 
     private void beginNextTransactionImpl() throws TransactionError {
       state = TxnState.INACTIVE;//clear state from previous txn
-      if ( currentTxnIndex + 1 >= txnIds.size() )
+      if ( currentTxnIndex + 1 >= txnIds.size() ) {
         throw new InvalidTrasactionState("No more transactions available in" +
                 " current batch for end point : " + endPt);
+      }
       ++currentTxnIndex;
       state = TxnState.OPEN;
       lastTxnUsed = getCurrentTxnId();
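
The runDDL rewrite above is the crux of this file's change: the retry loop existed only to catch CommandNeedRetryException and re-run the statement once, so with that exception type gone the catch block is unreachable and the loop collapses to a single driver.run() call. The simplified version still returns true unconditionally; a variant that also checks the response code is sketched here, on the assumption that failures should surface as QueryFailedException (the TestStreaming copy of runDDL later in this commit does a similar response-code check):

    private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Running Hive Query: " + sql);
      }
      CommandProcessorResponse cpr = driver.run(sql);
      if (cpr.getResponseCode() != 0) {
        // hypothetical handling: wrap the failed response instead of ignoring it
        throw new QueryFailedException(sql, new RuntimeException(cpr.getErrorMessage()));
      }
      return true;
    }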

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
index b41e85f..f78be7f 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/QueryFailedException.java
@@ -18,11 +18,10 @@
 
 package org.apache.hive.hcatalog.streaming;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 public class QueryFailedException extends StreamingException {
   String query;
-  public QueryFailedException(String query, CommandNeedRetryException e) {
+
+  public QueryFailedException(String query, Exception e) {
     super("Query failed: " + query + ". Due to :" + e.getMessage(), e);
     this.query = query;
   }
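
Widening the constructor's cause parameter from CommandNeedRetryException to Exception keeps this wrapper usable now that its original cause type no longer exists: any failure coming out of the driver can be captured. Illustrative call site, assuming sql and driver are in scope:

    try {
      driver.run(sql);
    } catch (Exception e) {
      throw new QueryFailedException(sql, e);   // any cause now fits the widened signature
    }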

http://git-wip-us.apache.org/repos/asf/hive/blob/f7dea106/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index 5e12614..4e92812 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -66,7 +66,6 @@ import org.apache.hadoop.hive.metastore.api.TxnState;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -538,6 +537,7 @@ public class TestStreaming {
    * @deprecated use {@link #checkDataWritten2(Path, long, long, int, String, boolean, String...)} -
    * there is little value in using InputFormat directly
    */
+  @Deprecated
   private void checkDataWritten(Path partitionPath, long minTxn, long maxTxn, int buckets, int numExpectedFiles,
                                 String... records) throws Exception {
     ValidTxnList txns = msClient.getValidTxns();
@@ -546,15 +546,21 @@ public class TestStreaming {
     Assert.assertEquals(0, dir.getOriginalFiles().size());
     List<AcidUtils.ParsedDelta> current = dir.getCurrentDirectories();
     System.out.println("Files found: ");
-    for (AcidUtils.ParsedDelta pd : current) System.out.println(pd.getPath().toString());
+    for (AcidUtils.ParsedDelta pd : current) {
+      System.out.println(pd.getPath().toString());
+    }
     Assert.assertEquals(numExpectedFiles, current.size());
 
     // find the absolute minimum transaction
     long min = Long.MAX_VALUE;
     long max = Long.MIN_VALUE;
     for (AcidUtils.ParsedDelta pd : current) {
-      if (pd.getMaxTransaction() > max) max = pd.getMaxTransaction();
-      if (pd.getMinTransaction() < min) min = pd.getMinTransaction();
+      if (pd.getMaxTransaction() > max) {
+        max = pd.getMaxTransaction();
+      }
+      if (pd.getMinTransaction() < min) {
+        min = pd.getMinTransaction();
+      }
     }
     Assert.assertEquals(minTxn, min);
     Assert.assertEquals(maxTxn, max);
@@ -593,15 +599,21 @@ public class TestStreaming {
     Assert.assertEquals(0, dir.getOriginalFiles().size());
     List<AcidUtils.ParsedDelta> current = dir.getCurrentDirectories();
     System.out.println("Files found: ");
-    for (AcidUtils.ParsedDelta pd : current) System.out.println(pd.getPath().toString());
+    for (AcidUtils.ParsedDelta pd : current) {
+      System.out.println(pd.getPath().toString());
+    }
     Assert.assertEquals(numExpectedFiles, current.size());
 
     // find the absolute minimum transaction
     long min = Long.MAX_VALUE;
     long max = Long.MIN_VALUE;
     for (AcidUtils.ParsedDelta pd : current) {
-      if (pd.getMaxTransaction() > max) max = pd.getMaxTransaction();
-      if (pd.getMinTransaction() < min) min = pd.getMinTransaction();
+      if (pd.getMaxTransaction() > max) {
+        max = pd.getMaxTransaction();
+      }
+      if (pd.getMinTransaction() < min) {
+        min = pd.getMinTransaction();
+      }
     }
     Assert.assertEquals(minTxn, min);
     Assert.assertEquals(maxTxn, max);
@@ -811,7 +823,7 @@ public class TestStreaming {
         txnBatch.heartbeat();
       }
     }
-    
+
   }
   @Test
   public void testTransactionBatchEmptyAbort() throws Exception {
@@ -978,7 +990,7 @@ public class TestStreaming {
       , txnBatch.getCurrentTransactionState());
     connection.close();
   }
-  
+
   @Test
   public void testTransactionBatchCommit_Json() throws Exception {
     HiveEndPoint endPt = new HiveEndPoint(metaStoreURI, dbName, tblName,
@@ -2024,7 +2036,7 @@ public class TestStreaming {
     }
     Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"),
       expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
-    
+
     r = msClient.showTxns();
     Assert.assertEquals("HWM didn't match", 21, r.getTxn_high_water_mark());
     ti = r.getOpen_txns();
@@ -2041,12 +2053,14 @@ public class TestStreaming {
     HashMap<Integer, ArrayList<SampleRec>> result = new HashMap<Integer, ArrayList<SampleRec>>();
 
     for (File deltaDir : new File(dbLocation + "/" + tableName).listFiles()) {
-      if(!deltaDir.getName().startsWith("delta"))
+      if(!deltaDir.getName().startsWith("delta")) {
         continue;
+      }
       File[] bucketFiles = deltaDir.listFiles();
       for (File bucketFile : bucketFiles) {
-        if(bucketFile.toString().endsWith("length"))
+        if(bucketFile.toString().endsWith("length")) {
           continue;
+        }
         Integer bucketNum = getBucketNumber(bucketFile);
         ArrayList<SampleRec>  recs = dumpBucket(new Path(bucketFile.toString()));
         result.put(bucketNum, recs);
@@ -2106,14 +2120,15 @@ public class TestStreaming {
     return new Path(tableLoc);
   }
 
-  private static Path addPartition(IDriver driver, String tableName, List<String> partVals, String[] partNames) throws QueryFailedException, CommandNeedRetryException, IOException {
+  private static Path addPartition(IDriver driver, String tableName, List<String> partVals, String[] partNames)
+      throws Exception {
     String partSpec = getPartsSpec(partNames, partVals);
     String addPart = "alter table " + tableName + " add partition ( " + partSpec  + " )";
     runDDL(driver, addPart);
     return getPartitionPath(driver, tableName, partSpec);
   }
 
-  private static Path getPartitionPath(IDriver driver, String tableName, String partSpec) throws CommandNeedRetryException, IOException {
+  private static Path getPartitionPath(IDriver driver, String tableName, String partSpec) throws Exception {
     ArrayList<String> res = queryTable(driver, "describe extended " + tableName + " PARTITION (" + partSpec + ")");
     String partInfo = res.get(res.size() - 1);
     int start = partInfo.indexOf("location:") + "location:".length();
@@ -2160,8 +2175,9 @@ public class TestStreaming {
   }
 
   private static String join(String[] values, String delimiter) {
-    if(values==null)
+    if(values==null) {
       return null;
+    }
     StringBuilder strbuf = new StringBuilder();
 
     boolean first = true;
@@ -2183,28 +2199,17 @@ public class TestStreaming {
   private static boolean runDDL(IDriver driver, String sql) throws QueryFailedException {
     LOG.debug(sql);
     System.out.println(sql);
-    int retryCount = 1; // # of times to retry if first attempt fails
-    for (int attempt=0; attempt <= retryCount; ++attempt) {
-      try {
-        //LOG.debug("Running Hive Query: "+ sql);
-        CommandProcessorResponse cpr = driver.run(sql);
-        if(cpr.getResponseCode() == 0) {
-          return true;
-        }
-        LOG.error("Statement: " + sql + " failed: " + cpr);
-      } catch (CommandNeedRetryException e) {
-        if (attempt == retryCount) {
-          throw new QueryFailedException(sql, e);
-        }
-        continue;
-      }
-    } // for
+    //LOG.debug("Running Hive Query: "+ sql);
+    CommandProcessorResponse cpr = driver.run(sql);
+    if (cpr.getResponseCode() == 0) {
+      return true;
+    }
+    LOG.error("Statement: " + sql + " failed: " + cpr);
     return false;
   }
 
 
-  public static ArrayList<String> queryTable(IDriver driver, String query)
-          throws CommandNeedRetryException, IOException {
+  public static ArrayList<String> queryTable(IDriver driver, String query) throws IOException {
     CommandProcessorResponse cpr = driver.run(query);
     if(cpr.getResponseCode() != 0) {
       throw new RuntimeException(query + " failed: " + cpr);
@@ -2227,13 +2232,21 @@ public class TestStreaming {
 
     @Override
     public boolean equals(Object o) {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
 
       SampleRec that = (SampleRec) o;
 
-      if (field2 != that.field2) return false;
-      if (field1 != null ? !field1.equals(that.field1) : that.field1 != null) return false;
+      if (field2 != that.field2) {
+        return false;
+      }
+      if (field1 != null ? !field1.equals(that.field1) : that.field1 != null) {
+        return false;
+      }
       return !(field3 != null ? !field3.equals(that.field3) : that.field3 != null);
 
     }