Posted to commits@hive.apache.org by mg...@apache.org on 2019/09/05 13:43:57 UTC

[hive] branch master updated: HIVE-21996 Remove unused code from Driver (Miklos Gergely reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 62676b0  HIVE-21996 Remove unused code from Driver (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
62676b0 is described below

commit 62676b008db993daa9df077c849b2d45692afb16
Author: miklosgergely <mg...@cloudera.com>
AuthorDate: Mon Jul 15 23:02:49 2019 +0200

    HIVE-21996 Remove unused code from Driver (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
---
 .../hadoop/hive/ql/TestCreateUdfEntities.java      |  9 ++-
 .../hive/ql/exec/spark/TestSparkStatistics.java    |  4 +-
 .../plugin/TestHiveAuthorizerCheckInvocation.java  | 49 ++++++------
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  | 86 ++--------------------
 ql/src/java/org/apache/hadoop/hive/ql/IDriver.java |  2 -
 .../apache/hadoop/hive/ql/reexec/ReExecDriver.java | 12 ++-
 .../hive/ql/exec/spark/TestHiveSparkClient.java    |  4 +-
 .../hadoop/hive/ql/exec/spark/TestSparkPlan.java   |  3 +-
 .../org/apache/hadoop/hive/ql/hooks/TestHooks.java |  2 +-
 .../hive/ql/io/TestSymlinkTextInputFormat.java     |  8 +-
 .../hadoop/hive/ql/parse/TestColumnAccess.java     |  6 +-
 .../hadoop/hive/ql/parse/TestHiveDecimalParse.java | 16 ++--
 .../hadoop/hive/ql/plan/TestReadEntityDirect.java  | 10 +--
 .../apache/hadoop/hive/ql/plan/TestViewEntity.java | 10 +--
 .../TestHivePrivilegeObjectOwnerNameAndType.java   |  7 +-
 15 files changed, 72 insertions(+), 156 deletions(-)
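
The functional core of the change: IDriver no longer declares compile(String), and the one-argument Driver.compile(String) overload, which simply delegated with resetTaskIds=true, is removed. Callers that compile a query without running it now pass the flag explicitly. A minimal sketch of the updated call pattern, assuming an initialized session; the table name is illustrative, not part of this commit:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;

    HiveConf conf = new HiveConf();
    Driver driver = new Driver(conf);  // this constructor is now annotated @VisibleForTesting
    // The removed one-argument overload was equivalent to passing true,
    // which resets the task ID counter before compilation.
    int rc = driver.compile("select * from test order by col", true);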

diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index 1894ba0..325831e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
@@ -48,8 +49,8 @@ public class TestCreateUdfEntities {
 
   @Test
   public void testUdfWithLocalResource() throws Exception {
-    int rc = driver.compile("CREATE FUNCTION " + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
-            + " using file '" + "file:///tmp/udf1.jar'");
+    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION " + funcName + " AS " +
+        "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'  using file '" + "file:///tmp/udf1.jar'", true);
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
@@ -66,8 +67,8 @@ public class TestCreateUdfEntities {
 
   @Test
   public void testUdfWithDfsResource() throws Exception {
-    int rc = driver.compile("CREATE FUNCTION default." + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
-            + " using file '" + "hdfs:///tmp/udf1.jar'");
+    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION default." + funcName + " AS " +
+        "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'  using file '" + "hdfs:///tmp/udf1.jar'", true);
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
index 191d5f5..137bedd 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistic;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsNames;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Assert;
@@ -35,7 +34,6 @@ import org.junit.Test;
 
 import java.io.File;
 import java.net.MalformedURLException;
-import java.net.URL;
 import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
@@ -62,7 +60,7 @@ public class TestSparkStatistics {
               null, null);
 
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
-      Assert.assertEquals(0, driver.compile("select * from test order by col"));
+      Assert.assertEquals(0, driver.compile("select * from test order by col", true));
 
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 5c803ca..a897dc6 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -135,7 +135,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + tableName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ");
+        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -151,7 +151,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + viewName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ");
+        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -168,7 +168,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     int status = driver.compile("select " + viewName + ".i, " + tableName + ".city from "
         + viewName + " join " + tableName + " on " + viewName + ".city = " + tableName
-        + ".city where " + tableName + ".k = 'X'");
+        + ".city where " + tableName + ".k = 'X'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -194,7 +194,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testInputAllColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
-    int status = driver.compile("select * from " + tableName + " order by i");
+    int status = driver.compile("select * from " + tableName + " order by i", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -220,7 +220,7 @@ public class TestHiveAuthorizerCheckInvocation {
   private void checkCreateViewOrTableWithDb(String newTable, String cmd)
       throws HiveAuthzPluginException, HiveAccessControlException {
     reset(mockedAuthorizer);
-    int status = driver.compile(cmd);
+    int status = driver.compile(cmd, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -248,7 +248,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testInputNoColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
-    int status = driver.compile("describe " + tableName);
+    int status = driver.compile("describe " + tableName, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -263,7 +263,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     final String funcName = "testauthfunc1";
     int status = driver.compile("create function " + dbName + "." + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -292,7 +292,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     // Verify privilege objects
     reset(mockedAuthorizer);
-    status = driver.compile("select  " + dbName + "." + funcName + "() , i from " + tableName);
+    status = driver.compile("select  " + dbName + "." + funcName + "() , i from " + tableName, true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -322,7 +322,7 @@ public class TestHiveAuthorizerCheckInvocation {
     // try using 2nd permanent function and verify its only 2nd one that shows up
     // for auth
     reset(mockedAuthorizer);
-    status = driver.compile("select  " + dbName + "." + funcName2 + "(i)  from " + tableName);
+    status = driver.compile("select  " + dbName + "." + funcName2 + "(i)  from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -345,7 +345,7 @@ public class TestHiveAuthorizerCheckInvocation {
     // try using both permanent functions
     reset(mockedAuthorizer);
     status = driver.compile(
-        "select  " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j  from " + tableName);
+        "select  " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j  from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -376,7 +376,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     final String funcName = "testAuthFunc2";
     int status = driver.compile("create temporary function " + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -409,12 +409,12 @@ public class TestHiveAuthorizerCheckInvocation {
       assertEquals("output count", 1, outputs.size());
       assertEquals("output type", HivePrivilegeObjectType.DATABASE, outputs.get(0).getType());
 
-      status = driver.compile("select * from " + tableName);
+      status = driver.compile("select * from " + tableName, true);
       assertEquals(0, status);
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("insert into " + tableName + " values(1)");
+      int status = driver.compile("insert into " + tableName + " values(1)", true);
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -428,7 +428,7 @@ public class TestHiveAuthorizerCheckInvocation {
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("select * from " + tableName);
+      int status = driver.compile("select * from " + tableName, true);
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -450,7 +450,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals(0, status);
 
     reset(mockedAuthorizer);
-    status = driver.compile("insert into " + tableName + " values (1)");
+    status = driver.compile("insert into " + tableName + " values (1)", true);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -460,7 +460,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals("input count", 0, inputs.size());
 
     reset(mockedAuthorizer);
-    status = driver.compile("select * from " + tableName);
+    status = driver.compile("select * from " + tableName, true);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -480,7 +480,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testUpdateSomeColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -499,7 +499,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testUpdateSomeColumnsUsedExprInSet() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3");
+    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -520,7 +520,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testDelete() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("delete from " + acidTableName + " where j = 3");
+    int status = driver.compile("delete from " + acidTableName + " where j = 3", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -534,7 +534,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testShowTables() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("show tables");
+    int status = driver.compile("show tables", true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -547,7 +547,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testDescDatabase() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("describe database " + dbName);
+    int status = driver.compile("describe database " + dbName, true);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -574,7 +574,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testReplDump() throws Exception {
 
     resetAuthorizer();
-    int status = driver.compile("repl dump " + dbName);
+    int status = driver.compile("repl dump " + dbName, true);
     assertEquals(0, status);
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
     HivePrivilegeObject dbObj = inputs.get(0);
@@ -582,7 +582,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
 
     resetAuthorizer();
-    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'");
+    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'", true);
     assertEquals(0, status);
     inputs = getHivePrivilegeObjectInputs().getLeft();
     dbObj = inputs.get(0);
@@ -627,7 +627,8 @@ public class TestHiveAuthorizerCheckInvocation {
         inputsCapturer.capture(), outputsCapturer.capture(),
         any(HiveAuthzContext.class));
 
-    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
+        inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 2eb6591..043c976 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -55,7 +55,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
-import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -123,9 +122,6 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.wm.WmContext;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hive.common.util.ShutdownHookManager;
 import org.apache.hive.common.util.TxnIdUtils;
@@ -143,12 +139,12 @@ public class Driver implements IDriver {
   static final private String CLASS_NAME = Driver.class.getName();
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
-  static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private static final int SHUTDOWN_HOOK_PRIORITY = 0;
   private final QueryInfo queryInfo;
   private Runnable shutdownRunner = null;
 
   private int maxRows = 100;
-  ByteStream.Output bos = new ByteStream.Output();
+  private ByteStream.Output bos = new ByteStream.Output();
 
   private final HiveConf conf;
   private DataInput resStream;
@@ -161,7 +157,7 @@ public class Driver implements IDriver {
   private Throwable downstreamError;
 
   private FetchTask fetchTask;
-  List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
+  private List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
 
   // A limit on the number of threads that can be launched
   private int maxthreads;
@@ -271,24 +267,6 @@ public class Driver implements IDriver {
     return true;
   }
 
-  /**
-   * Return the status information about the Map-Reduce cluster
-   */
-  public ClusterStatus getClusterStatus() throws Exception {
-    ClusterStatus cs;
-    try {
-      JobConf job = new JobConf(conf);
-      JobClient jc = new JobClient(job);
-      cs = jc.getClusterStatus();
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw e;
-    }
-    LOG.info("Returning cluster status: " + cs.toString());
-    return cs;
-  }
-
-
   @Override
   public Schema getSchema() {
     return schema;
@@ -359,37 +337,6 @@ public class Driver implements IDriver {
   }
 
   /**
-   * Get a Schema with fields represented with Thrift DDL types
-   */
-  public Schema getThriftSchema() throws Exception {
-    Schema schema;
-    try {
-      schema = getSchema();
-      if (schema != null) {
-        List<FieldSchema> lst = schema.getFieldSchemas();
-        // Go over the schema and convert type to thrift type
-        if (lst != null) {
-          for (FieldSchema f : lst) {
-            f.setType(ColumnType.typeToThriftType(f.getType()));
-          }
-        }
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw e;
-    }
-    LOG.info("Returning Thrift schema: " + schema);
-    return schema;
-  }
-
-  /**
-   * Return the maximum number of rows returned by getResults
-   */
-  public int getMaxRows() {
-    return maxRows;
-  }
-
-  /**
    * Set the maximum number of rows returned by getResults
    */
   @Override
@@ -397,19 +344,13 @@ public class Driver implements IDriver {
     this.maxRows = maxRows;
   }
 
+  @VisibleForTesting
   public Driver(HiveConf conf) {
     this(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), null);
   }
 
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
-  // NOTE: only used from index related classes
-  public Driver(HiveConf conf, LineageState lineageState) {
-    this(getNewQueryState(conf, lineageState), null);
-  }
-
-  // Pass lineageState when a driver instantiates another Driver to run
-  // or compile another query
   public Driver(HiveConf conf, Context ctx, LineageState lineageState) {
     this(getNewQueryState(conf, lineageState), null, null);
     this.ctx = ctx;
@@ -453,18 +394,6 @@ public class Driver implements IDriver {
   }
 
   /**
-   * Compile a new query. Any currently-planned query associated with this Driver is discarded.
-   * Do not reset id for inner queries(index, etc). Task ids are used for task identity check.
-   *
-   * @param command
-   *          The SQL query to compile.
-   */
-  @Override
-  public int compile(String command) {
-    return compile(command, true);
-  }
-
-  /**
    * Compile a new query, but potentially reset taskID counter.  Not resetting task counter
    * is useful for generating re-entrant QL queries.
    * @param command  The HiveQL query to compile
@@ -2585,11 +2514,6 @@ public class Driver implements IDriver {
     ShutdownHookManager.removeShutdownHook(shutdownRunner);
   }
 
-
-  public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
-    return plan.getQueryPlan();
-  }
-
   public String getErrorMsg() {
     return errorMessage;
   }
@@ -2642,7 +2566,7 @@ public class Driver implements IDriver {
     }
   }
 
-  public void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
+  void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
     this.compactionWriteIds = val;
     this.compactorTxnId = compactorTxnId;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
index e44e6a3..b618224 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
@@ -36,8 +36,6 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 @InterfaceStability.Unstable
 public interface IDriver extends CommandProcessor {
 
-  int compile(String string);
-
   CommandProcessorResponse compileAndRespond(String statement);
 
   QueryPlan getPlan();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
index ab5c66b..e8bf9dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.reexec;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -87,7 +86,7 @@ public class ReExecDriver implements IDriver {
 
   private static final Logger LOG = LoggerFactory.getLogger(ReExecDriver.class);
   private boolean explainReOptimization;
-  protected Driver coreDriver;
+  private Driver coreDriver;
   private QueryState queryState;
   private String currentQuery;
   private int executionIndex;
@@ -99,7 +98,7 @@ public class ReExecDriver implements IDriver {
     return queryState.getConf();
   }
 
-  public boolean firstExecution() {
+  private boolean firstExecution() {
     return executionIndex == 0;
   }
 
@@ -115,9 +114,8 @@ public class ReExecDriver implements IDriver {
     }
   }
 
-  @Override
-  public int compile(String string) {
-    return coreDriver.compile(string);
+  public int compile(String command, boolean resetTaskIds) {
+    return coreDriver.compile(command, resetTaskIds);
   }
 
   @Override
@@ -223,7 +221,7 @@ public class ReExecDriver implements IDriver {
     return run();
   }
 
-  protected void prepareToReExecute() {
+  private void prepareToReExecute() {
     for (IReExecutionPlugin p : plugins) {
       p.prepareToReExecute();
     }
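
Because IDriver drops compile() entirely, ReExecDriver.compile(String, boolean) is no longer an @Override but a plain public method. Tests that obtain an IDriver from DriverFactory therefore cast before compiling, as the hunks below show. A condensed sketch, assuming re-execution is enabled so the factory returns a ReExecDriver; the query text is illustrative:

    IDriver driver = DriverFactory.newDriver(conf);
    // Cast is needed: compile() now lives only on the concrete driver classes.
    int rc = ((ReExecDriver) driver).compile("select * from test order by col", true);
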
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
index f42cffd..0b61a1a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -71,7 +71,7 @@ public class TestHiveSparkClient {
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
       String query = "select * from test order by col";
-      driver.compile(query);
+      ((ReExecDriver)driver).compile(query, true);
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
index ef02a29..5badabf 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveKey;
+import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -77,7 +78,7 @@ public class TestSparkPlan {
       driver = DriverFactory.newDriver(conf);
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
-      driver.compile("select * from test order by col");
+      ((ReExecDriver)driver).compile("select * from test order by col", true);
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
index c1081f2..b38b128 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
@@ -77,7 +77,7 @@ public class TestHooks {
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     Driver driver = createDriver(conf);
-    int ret = driver.compile("select 'XXX' from t1");
+    int ret = driver.compile("select 'XXX' from t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals("select 'AAA' from t1", conf.getQueryString());
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
index 005d420..1a1a7b1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
@@ -59,7 +59,6 @@ import org.junit.Test;
 /**
  * Unittest for SymlinkTextInputFormat.
  */
-@SuppressWarnings("deprecation")
 public class TestSymlinkTextInputFormat {
   private static final Logger log =
       LoggerFactory.getLogger(TestSymlinkTextInputFormat.class);
@@ -113,22 +112,17 @@ public class TestSymlinkTextInputFormat {
   public void testCombine() throws Exception {
     JobConf newJob = new JobConf(job);
     FileSystem fs = dataDir1.getFileSystem(newJob);
-    int symbolLinkedFileSize = 0;
 
     Path dir1_file1 = new Path(dataDir1, "combinefile1_1");
     writeTextFile(dir1_file1,
                   "dir1_file1_line1\n" +
                   "dir1_file1_line2\n");
 
-    symbolLinkedFileSize += fs.getFileStatus(dir1_file1).getLen();
-
     Path dir2_file1 = new Path(dataDir2, "combinefile2_1");
     writeTextFile(dir2_file1,
                   "dir2_file1_line1\n" +
                   "dir2_file1_line2\n");
 
-    symbolLinkedFileSize += fs.getFileStatus(dir2_file1).getLen();
-
     // A symlink file, contains first file from first dir and second file from
     // second dir.
     writeSymlinkFile(
@@ -172,7 +166,7 @@ public class TestSymlinkTextInputFormat {
       }
 
       String cmd = "select key*1 from " + tblName;
-      ecode = drv.compile(cmd);
+      ecode = drv.compile(cmd, true);
       if (ecode != 0) {
         throw new Exception("Select compile: " + cmd
             + " failed with exit code= " + ecode);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
index d2b9327..594688b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
@@ -59,7 +59,7 @@ public class TestColumnAccess {
   public void testQueryTable1() throws ParseException {
     String query = "select * from t1";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -83,7 +83,7 @@ public class TestColumnAccess {
   public void testJoinTable1AndTable2() throws ParseException {
     String query = "select * from t1 join t2 on (t1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -120,7 +120,7 @@ public class TestColumnAccess {
   public void testJoinView1AndTable2() throws ParseException {
     String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 0d87662..5096763 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -57,7 +57,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(66,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -68,7 +68,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(0,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -79,7 +79,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,33))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -90,7 +90,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,-1))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("extraneous input '-' expecting Number"));
@@ -101,7 +101,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,33,4))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
       driver.getErrorMsg().contains("missing ) at ',' near ',' in column name or constraint"));
@@ -112,7 +112,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7a))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("mismatched input '7a' expecting Number near '('"));
@@ -123,7 +123,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(20,23))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -142,7 +142,7 @@ public class TestHiveDecimalParse {
 
   private String getColumnType(String query) {
     Driver driver = createDriver();
-    int rc = driver.compile(query);
+    int rc = driver.compile(query, true);
 
     if (rc != 0) {
       return null;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 88edc12..51f610d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -76,7 +76,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from t1");
+    int ret = driver.compile("select * from t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(1, CheckInputReadEntityDirect.readEntities.size());
     assertTrue("isDirect", CheckInputReadEntityDirect.readEntities.iterator().next().isDirect());
@@ -90,7 +90,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityInDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1");
+    int ret = driver.compile("select * from v1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -113,7 +113,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityViewDirectJoin() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)");
+    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -136,7 +136,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityViewDirectUnion() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
+    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -158,7 +158,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityInDirectJoinAlias() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
+    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)", true);
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 6d86ca2..97ef3c4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -93,7 +93,7 @@ public class TestViewEntity {
         .getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1 );
+    driver.compile("select * from " + view1, true);
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -128,7 +128,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view1 + " as select * from " + tab1).getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1 );
+    driver.compile("select * from " + view1, true);
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -204,7 +204,7 @@ public class TestViewEntity {
     assertEquals("Checking command success", 0, ret);
 
     //select from view2
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
 
     //verify that only view2 is direct input in above query
     ReadEntity[] readEntities = CheckInputReadEntity.readEntities;
@@ -243,7 +243,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + ") x").getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
 
@@ -277,7 +277,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + " union all select * from " + view1 + ") x").getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2);
+    driver.compile("select * from " + view2, true);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
index 9b50a0d..0f45c81 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
@@ -99,7 +99,7 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
   @Test
   public void testOwnerNames() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)");
+    driver.compile("create table default.t1 (name string)", true);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
     boolean containsDBOwnerName = false;
@@ -129,7 +129,7 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
   @Test
   public void testOwnerType() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)");
+    driver.compile("create table default.t1 (name string)", true);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
     boolean containsOwnerType = false;
@@ -164,7 +164,8 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
         .checkPrivileges(any(HiveOperationType.class), inputsCapturer.capture(), outputsCapturer.capture(),
             any(HiveAuthzContext.class));
 
-    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
+        inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 
 }