Posted to commits@hive.apache.org by mg...@apache.org on 2019/07/25 09:55:28 UTC

[hive] branch master updated: Revert "HIVE-21996 Remove unused code from Driver"

This is an automated email from the ASF dual-hosted git repository.

mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 06edd56  Revert "HIVE-21996 Remove unused code from Driver"
06edd56 is described below

commit 06edd561f316159da0547244d648917e22332c89
Author: miklosgergely <mg...@cloudera.com>
AuthorDate: Thu Jul 25 11:54:15 2019 +0200

    Revert "HIVE-21996 Remove unused code from Driver"
    
    This reverts commit 2e7441f8317818b38a970493858b0ed296ffdda1.
---
 .../hadoop/hive/ql/TestCreateUdfEntities.java      |  9 +--
 .../hive/ql/exec/spark/TestSparkStatistics.java    |  4 +-
 .../plugin/TestHiveAuthorizerCheckInvocation.java  | 49 ++++++------
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  | 86 ++++++++++++++++++++--
 ql/src/java/org/apache/hadoop/hive/ql/IDriver.java |  2 +
 .../apache/hadoop/hive/ql/reexec/ReExecDriver.java | 12 +--
 .../hive/ql/exec/spark/TestHiveSparkClient.java    |  4 +-
 .../hadoop/hive/ql/exec/spark/TestSparkPlan.java   |  3 +-
 .../org/apache/hadoop/hive/ql/hooks/TestHooks.java |  2 +-
 .../hive/ql/io/TestSymlinkTextInputFormat.java     |  8 +-
 .../hadoop/hive/ql/parse/TestColumnAccess.java     |  6 +-
 .../hadoop/hive/ql/parse/TestHiveDecimalParse.java | 16 ++--
 .../hadoop/hive/ql/plan/TestReadEntityDirect.java  | 10 +--
 .../apache/hadoop/hive/ql/plan/TestViewEntity.java | 10 +--
 .../TestHivePrivilegeObjectOwnerNameAndType.java   |  7 +-
 15 files changed, 156 insertions(+), 72 deletions(-)
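
For context before the per-file hunks: HIVE-21996 had removed the one-argument compile() from IDriver, forcing test code to downcast to the concrete ReExecDriver and pass resetTaskIds explicitly; this revert puts the overload back on the interface. A minimal sketch of the restored call shape, using only types that appear in the diff below (DriverFactory, IDriver, HiveConf):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.DriverFactory;
    import org.apache.hadoop.hive.ql.IDriver;

    public class CompileCallSite {
      // Post-revert call shape, as used throughout the tests below.
      static int compile(HiveConf conf, String sql) {
        IDriver driver = DriverFactory.newDriver(conf);
        // The pre-revert equivalent required a downcast:
        //   ((ReExecDriver) driver).compile(sql, true);
        // The restored one-argument overload delegates to
        // Driver.compile(command, true); see the Driver.java hunk below.
        return driver.compile(sql);
      }
    }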

diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index 325831e..1894ba0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
@@ -49,8 +48,8 @@ public class TestCreateUdfEntities {
 
   @Test
   public void testUdfWithLocalResource() throws Exception {
-    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION " + funcName + " AS " +
-        "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'  using file '" + "file:///tmp/udf1.jar'", true);
+    int rc = driver.compile("CREATE FUNCTION " + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
+            + " using file '" + "file:///tmp/udf1.jar'");
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
@@ -67,8 +66,8 @@ public class TestCreateUdfEntities {
 
   @Test
   public void testUdfWithDfsResource() throws Exception {
-    int rc = ((ReExecDriver)driver).compile("CREATE FUNCTION default." + funcName + " AS " +
-        "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'  using file '" + "hdfs:///tmp/udf1.jar'", true);
+    int rc = driver.compile("CREATE FUNCTION default." + funcName + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf' "
+            + " using file '" + "hdfs:///tmp/udf1.jar'");
     assertEquals(0, rc);
     WriteEntity outputEntities[] = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
     assertEquals(outputEntities.length, 3);
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
index 137bedd..191d5f5 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistic;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsNames;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.junit.Assert;
@@ -34,6 +35,7 @@ import org.junit.Test;
 
 import java.io.File;
 import java.net.MalformedURLException;
+import java.net.URL;
 import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
@@ -60,7 +62,7 @@ public class TestSparkStatistics {
               null, null);
 
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
-      Assert.assertEquals(0, driver.compile("select * from test order by col", true));
+      Assert.assertEquals(0, driver.compile("select * from test order by col"));
 
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index a897dc6..5c803ca 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -135,7 +135,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + tableName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
+        + " where k = 'X' and city = 'Scottsdale-AZ' ");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -151,7 +151,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     reset(mockedAuthorizer);
     int status = driver.compile("select i from " + viewName
-        + " where k = 'X' and city = 'Scottsdale-AZ' ", true);
+        + " where k = 'X' and city = 'Scottsdale-AZ' ");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -168,7 +168,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     int status = driver.compile("select " + viewName + ".i, " + tableName + ".city from "
         + viewName + " join " + tableName + " on " + viewName + ".city = " + tableName
-        + ".city where " + tableName + ".k = 'X'", true);
+        + ".city where " + tableName + ".k = 'X'");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -194,7 +194,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testInputAllColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
-    int status = driver.compile("select * from " + tableName + " order by i", true);
+    int status = driver.compile("select * from " + tableName + " order by i");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -220,7 +220,7 @@ public class TestHiveAuthorizerCheckInvocation {
   private void checkCreateViewOrTableWithDb(String newTable, String cmd)
       throws HiveAuthzPluginException, HiveAccessControlException {
     reset(mockedAuthorizer);
-    int status = driver.compile(cmd, true);
+    int status = driver.compile(cmd);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -248,7 +248,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testInputNoColumnsUsed() throws Exception {
 
     reset(mockedAuthorizer);
-    int status = driver.compile("describe " + tableName, true);
+    int status = driver.compile("describe " + tableName);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -263,7 +263,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     final String funcName = "testauthfunc1";
     int status = driver.compile("create function " + dbName + "." + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -292,7 +292,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     // Verify privilege objects
     reset(mockedAuthorizer);
-    status = driver.compile("select  " + dbName + "." + funcName + "() , i from " + tableName, true);
+    status = driver.compile("select  " + dbName + "." + funcName + "() , i from " + tableName);
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -322,7 +322,7 @@ public class TestHiveAuthorizerCheckInvocation {
     // try using 2nd permanent function and verify its only 2nd one that shows up
     // for auth
     reset(mockedAuthorizer);
-    status = driver.compile("select  " + dbName + "." + funcName2 + "(i)  from " + tableName, true);
+    status = driver.compile("select  " + dbName + "." + funcName2 + "(i)  from " + tableName);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -345,7 +345,7 @@ public class TestHiveAuthorizerCheckInvocation {
     // try using both permanent functions
     reset(mockedAuthorizer);
     status = driver.compile(
-        "select  " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j  from " + tableName, true);
+        "select  " + dbName + "." + funcName2 + "(i), " + dbName + "." + funcName + "(), j  from " + tableName);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -376,7 +376,7 @@ public class TestHiveAuthorizerCheckInvocation {
     reset(mockedAuthorizer);
     final String funcName = "testAuthFunc2";
     int status = driver.compile("create temporary function " + funcName
-        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'", true);
+        + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
@@ -409,12 +409,12 @@ public class TestHiveAuthorizerCheckInvocation {
       assertEquals("output count", 1, outputs.size());
       assertEquals("output type", HivePrivilegeObjectType.DATABASE, outputs.get(0).getType());
 
-      status = driver.compile("select * from " + tableName, true);
+      status = driver.compile("select * from " + tableName);
       assertEquals(0, status);
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("insert into " + tableName + " values(1)", true);
+      int status = driver.compile("insert into " + tableName + " values(1)");
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -428,7 +428,7 @@ public class TestHiveAuthorizerCheckInvocation {
     }
     { // select from the temp table
       reset(mockedAuthorizer);
-      int status = driver.compile("select * from " + tableName, true);
+      int status = driver.compile("select * from " + tableName);
       assertEquals(0, status);
 
       // temp tables should be skipped from authorization
@@ -450,7 +450,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals(0, status);
 
     reset(mockedAuthorizer);
-    status = driver.compile("insert into " + tableName + " values (1)", true);
+    status = driver.compile("insert into " + tableName + " values (1)");
     assertEquals(0, status);
 
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -460,7 +460,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals("input count", 0, inputs.size());
 
     reset(mockedAuthorizer);
-    status = driver.compile("select * from " + tableName, true);
+    status = driver.compile("select * from " + tableName);
     assertEquals(0, status);
 
     inputs = getHivePrivilegeObjectInputs().getLeft();
@@ -480,7 +480,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testUpdateSomeColumnsUsed() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3", true);
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -499,7 +499,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testUpdateSomeColumnsUsedExprInSet() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3", true);
+    int status = driver.compile("update " + acidTableName + " set i = 5, j = k where j = 3");
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -520,7 +520,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testDelete() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("delete from " + acidTableName + " where j = 3", true);
+    int status = driver.compile("delete from " + acidTableName + " where j = 3");
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -534,7 +534,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testShowTables() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("show tables", true);
+    int status = driver.compile("show tables");
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -547,7 +547,7 @@ public class TestHiveAuthorizerCheckInvocation {
   @Test
   public void testDescDatabase() throws Exception {
     reset(mockedAuthorizer);
-    int status = driver.compile("describe database " + dbName, true);
+    int status = driver.compile("describe database " + dbName);
     assertEquals(0, status);
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
@@ -574,7 +574,7 @@ public class TestHiveAuthorizerCheckInvocation {
   public void testReplDump() throws Exception {
 
     resetAuthorizer();
-    int status = driver.compile("repl dump " + dbName, true);
+    int status = driver.compile("repl dump " + dbName);
     assertEquals(0, status);
     List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
     HivePrivilegeObject dbObj = inputs.get(0);
@@ -582,7 +582,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
 
     resetAuthorizer();
-    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'", true);
+    status = driver.compile("repl dump " + dbName + ".'" + inDbTableName + "'");
     assertEquals(0, status);
     inputs = getHivePrivilegeObjectInputs().getLeft();
     dbObj = inputs.get(0);
@@ -627,8 +627,7 @@ public class TestHiveAuthorizerCheckInvocation {
         inputsCapturer.capture(), outputsCapturer.capture(),
         any(HiveAuthzContext.class));
 
-    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
-        inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 043c976..2eb6591 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
+import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -122,6 +123,9 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.wm.WmContext;
 import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hive.common.util.ShutdownHookManager;
 import org.apache.hive.common.util.TxnIdUtils;
@@ -139,12 +143,12 @@ public class Driver implements IDriver {
   static final private String CLASS_NAME = Driver.class.getName();
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
-  private static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  static final int SHUTDOWN_HOOK_PRIORITY = 0;
   private final QueryInfo queryInfo;
   private Runnable shutdownRunner = null;
 
   private int maxRows = 100;
-  private ByteStream.Output bos = new ByteStream.Output();
+  ByteStream.Output bos = new ByteStream.Output();
 
   private final HiveConf conf;
   private DataInput resStream;
@@ -157,7 +161,7 @@ public class Driver implements IDriver {
   private Throwable downstreamError;
 
   private FetchTask fetchTask;
-  private List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
+  List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
 
   // A limit on the number of threads that can be launched
   private int maxthreads;
@@ -267,6 +271,24 @@ public class Driver implements IDriver {
     return true;
   }
 
+  /**
+   * Return the status information about the Map-Reduce cluster
+   */
+  public ClusterStatus getClusterStatus() throws Exception {
+    ClusterStatus cs;
+    try {
+      JobConf job = new JobConf(conf);
+      JobClient jc = new JobClient(job);
+      cs = jc.getClusterStatus();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+    LOG.info("Returning cluster status: " + cs.toString());
+    return cs;
+  }
+
+
   @Override
   public Schema getSchema() {
     return schema;
@@ -337,6 +359,37 @@ public class Driver implements IDriver {
   }
 
   /**
+   * Get a Schema with fields represented with Thrift DDL types
+   */
+  public Schema getThriftSchema() throws Exception {
+    Schema schema;
+    try {
+      schema = getSchema();
+      if (schema != null) {
+        List<FieldSchema> lst = schema.getFieldSchemas();
+        // Go over the schema and convert type to thrift type
+        if (lst != null) {
+          for (FieldSchema f : lst) {
+            f.setType(ColumnType.typeToThriftType(f.getType()));
+          }
+        }
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+    LOG.info("Returning Thrift schema: " + schema);
+    return schema;
+  }
+
+  /**
+   * Return the maximum number of rows returned by getResults
+   */
+  public int getMaxRows() {
+    return maxRows;
+  }
+
+  /**
    * Set the maximum number of rows returned by getResults
    */
   @Override
@@ -344,13 +397,19 @@ public class Driver implements IDriver {
     this.maxRows = maxRows;
   }
 
-  @VisibleForTesting
   public Driver(HiveConf conf) {
     this(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), null);
   }
 
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
+  // NOTE: only used from index related classes
+  public Driver(HiveConf conf, LineageState lineageState) {
+    this(getNewQueryState(conf, lineageState), null);
+  }
+
+  // Pass lineageState when a driver instantiates another Driver to run
+  // or compile another query
   public Driver(HiveConf conf, Context ctx, LineageState lineageState) {
     this(getNewQueryState(conf, lineageState), null, null);
     this.ctx = ctx;
@@ -394,6 +453,18 @@ public class Driver implements IDriver {
   }
 
   /**
+   * Compile a new query. Any currently-planned query associated with this Driver is discarded.
+   * Do not reset id for inner queries(index, etc). Task ids are used for task identity check.
+   *
+   * @param command
+   *          The SQL query to compile.
+   */
+  @Override
+  public int compile(String command) {
+    return compile(command, true);
+  }
+
+  /**
    * Compile a new query, but potentially reset taskID counter.  Not resetting task counter
    * is useful for generating re-entrant QL queries.
    * @param command  The HiveQL query to compile
@@ -2514,6 +2585,11 @@ public class Driver implements IDriver {
     ShutdownHookManager.removeShutdownHook(shutdownRunner);
   }
 
+
+  public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
+    return plan.getQueryPlan();
+  }
+
   public String getErrorMsg() {
     return errorMessage;
   }
@@ -2566,7 +2642,7 @@ public class Driver implements IDriver {
     }
   }
 
-  void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
+  public void setCompactionWriteIds(ValidWriteIdList val, long compactorTxnId) {
     this.compactionWriteIds = val;
     this.compactorTxnId = compactorTxnId;
   }
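
Worth noting on the Driver.java hunks above: besides the one-argument compile(), the revert reinstates several convenience accessors (getClusterStatus, getThriftSchema, getMaxRows, getQueryPlan) and reopens the Driver(HiveConf) constructor for general use. A hedged sketch of how a caller might exercise them, assuming the post-revert tree (the Schema import path follows the metastore.api package used elsewhere in the file):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.api.Schema;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.mapred.ClusterStatus;

    public class RestoredDriverApis {
      static void inspect(HiveConf conf) throws Exception {
        Driver driver = new Driver(conf); // no longer marked @VisibleForTesting

        // Queries the MR JobClient; both restored helpers log and rethrow.
        ClusterStatus cs = driver.getClusterStatus();

        // Same fields as getSchema(), with each column type mapped to its
        // Thrift DDL type via ColumnType.typeToThriftType().
        Schema thriftSchema = driver.getThriftSchema();

        // Counterpart to the existing setMaxRows(); defaults to 100.
        int maxRows = driver.getMaxRows();

        System.out.println(cs + " / " + thriftSchema + " / maxRows=" + maxRows);
      }
    }
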
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
index b618224..e44e6a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/IDriver.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 @InterfaceStability.Unstable
 public interface IDriver extends CommandProcessor {
 
+  int compile(String string);
+
   CommandProcessorResponse compileAndRespond(String statement);
 
   QueryPlan getPlan();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
index e8bf9dc..ab5c66b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.reexec;
 
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -86,7 +87,7 @@ public class ReExecDriver implements IDriver {
 
   private static final Logger LOG = LoggerFactory.getLogger(ReExecDriver.class);
   private boolean explainReOptimization;
-  private Driver coreDriver;
+  protected Driver coreDriver;
   private QueryState queryState;
   private String currentQuery;
   private int executionIndex;
@@ -98,7 +99,7 @@ public class ReExecDriver implements IDriver {
     return queryState.getConf();
   }
 
-  private boolean firstExecution() {
+  public boolean firstExecution() {
     return executionIndex == 0;
   }
 
@@ -114,8 +115,9 @@ public class ReExecDriver implements IDriver {
     }
   }
 
-  public int compile(String command, boolean resetTaskIds) {
-    return coreDriver.compile(command, resetTaskIds);
+  @Override
+  public int compile(String string) {
+    return coreDriver.compile(string);
   }
 
   @Override
@@ -221,7 +223,7 @@ public class ReExecDriver implements IDriver {
     return run();
   }
 
-  private void prepareToReExecute() {
+  protected void prepareToReExecute() {
     for (IReExecutionPlugin p : plugins) {
       p.prepareToReExecute();
     }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
index 0b61a1a..f42cffd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveSparkClient.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -71,7 +71,7 @@ public class TestHiveSparkClient {
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
       String query = "select * from test order by col";
-      ((ReExecDriver)driver).compile(query, true);
+      driver.compile(query);
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
index 5badabf..ef02a29 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkPlan.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveKey;
-import org.apache.hadoop.hive.ql.reexec.ReExecDriver;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -78,7 +77,7 @@ public class TestSparkPlan {
       driver = DriverFactory.newDriver(conf);
       Assert.assertEquals(0, driver.run("create table test (col int)").getResponseCode());
 
-      ((ReExecDriver)driver).compile("select * from test order by col", true);
+      driver.compile("select * from test order by col");
       List<SparkTask> sparkTasks = Utilities.getSparkTasks(driver.getPlan().getRootTasks());
       Assert.assertEquals(1, sparkTasks.size());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
index b38b128..c1081f2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
@@ -77,7 +77,7 @@ public class TestHooks {
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     Driver driver = createDriver(conf);
-    int ret = driver.compile("select 'XXX' from t1", true);
+    int ret = driver.compile("select 'XXX' from t1");
     assertEquals("Checking command success", 0, ret);
     assertEquals("select 'AAA' from t1", conf.getQueryString());
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
index 1a1a7b1..005d420 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
@@ -59,6 +59,7 @@ import org.junit.Test;
 /**
  * Unittest for SymlinkTextInputFormat.
  */
+@SuppressWarnings("deprecation")
 public class TestSymlinkTextInputFormat {
   private static final Logger log =
       LoggerFactory.getLogger(TestSymlinkTextInputFormat.class);
@@ -112,17 +113,22 @@ public class TestSymlinkTextInputFormat {
   public void testCombine() throws Exception {
     JobConf newJob = new JobConf(job);
     FileSystem fs = dataDir1.getFileSystem(newJob);
+    int symbolLinkedFileSize = 0;
 
     Path dir1_file1 = new Path(dataDir1, "combinefile1_1");
     writeTextFile(dir1_file1,
                   "dir1_file1_line1\n" +
                   "dir1_file1_line2\n");
 
+    symbolLinkedFileSize += fs.getFileStatus(dir1_file1).getLen();
+
     Path dir2_file1 = new Path(dataDir2, "combinefile2_1");
     writeTextFile(dir2_file1,
                   "dir2_file1_line1\n" +
                   "dir2_file1_line2\n");
 
+    symbolLinkedFileSize += fs.getFileStatus(dir2_file1).getLen();
+
     // A symlink file, contains first file from first dir and second file from
     // second dir.
     writeSymlinkFile(
@@ -166,7 +172,7 @@ public class TestSymlinkTextInputFormat {
       }
 
       String cmd = "select key*1 from " + tblName;
-      ecode = drv.compile(cmd, true);
+      ecode = drv.compile(cmd);
       if (ecode != 0) {
         throw new Exception("Select compile: " + cmd
             + " failed with exit code= " + ecode);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
index 594688b..d2b9327 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
@@ -59,7 +59,7 @@ public class TestColumnAccess {
   public void testQueryTable1() throws ParseException {
     String query = "select * from t1";
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -83,7 +83,7 @@ public class TestColumnAccess {
   public void testJoinTable1AndTable2() throws ParseException {
     String query = "select * from t1 join t2 on (t1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
@@ -120,7 +120,7 @@ public class TestColumnAccess {
   public void testJoinView1AndTable2() throws ParseException {
     String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertEquals("Checking command success", 0, rc);
     QueryPlan plan = driver.getPlan();
     // check access columns from ColumnAccessInfo
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 5096763..0d87662 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -57,7 +57,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(66,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -68,7 +68,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(0,7))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
@@ -79,7 +79,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,33))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -90,7 +90,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,-1))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("extraneous input '-' expecting Number"));
@@ -101,7 +101,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7,33,4))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
       driver.getErrorMsg().contains("missing ) at ',' near ',' in column name or constraint"));
@@ -112,7 +112,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(7a))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("mismatched input '7a' expecting Number near '('"));
@@ -123,7 +123,7 @@ public class TestHiveDecimalParse {
     String query = "create table `dec` (d decimal(20,23))";
 
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
         driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
@@ -142,7 +142,7 @@ public class TestHiveDecimalParse {
 
   private String getColumnType(String query) {
     Driver driver = createDriver();
-    int rc = driver.compile(query, true);
+    int rc = driver.compile(query);
 
     if (rc != 0) {
       return null;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 51f610d..88edc12 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -76,7 +76,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from t1", true);
+    int ret = driver.compile("select * from t1");
     assertEquals("Checking command success", 0, ret);
     assertEquals(1, CheckInputReadEntityDirect.readEntities.size());
     assertTrue("isDirect", CheckInputReadEntityDirect.readEntities.iterator().next().isDirect());
@@ -90,7 +90,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityInDirect() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1", true);
+    int ret = driver.compile("select * from v1");
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -113,7 +113,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityViewDirectJoin() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)", true);
+    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)");
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -136,7 +136,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityViewDirectUnion() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1", true);
+    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
@@ -158,7 +158,7 @@ public class TestReadEntityDirect {
   @Test
   public void testSelectEntityInDirectJoinAlias() throws ParseException {
     Driver driver = createDriver();
-    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)", true);
+    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
     assertEquals("Checking command success", 0, ret);
     assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
     for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 97ef3c4..6d86ca2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -93,7 +93,7 @@ public class TestViewEntity {
         .getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1, true);
+    driver.compile("select * from " + view1 );
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -128,7 +128,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view1 + " as select * from " + tab1).getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view1, true);
+    driver.compile("select * from " + view1 );
     // view entity
     assertEquals("default@" + view1, CheckInputReadEntity.readEntities[0].getName());
 
@@ -204,7 +204,7 @@ public class TestViewEntity {
     assertEquals("Checking command success", 0, ret);
 
     //select from view2
-    driver.compile("select * from " + view2, true);
+    driver.compile("select * from " + view2);
 
     //verify that only view2 is direct input in above query
     ReadEntity[] readEntities = CheckInputReadEntity.readEntities;
@@ -243,7 +243,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + ") x").getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2, true);
+    driver.compile("select * from " + view2);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
 
@@ -277,7 +277,7 @@ public class TestViewEntity {
     ret = driver.run("create view " + view2 + " as select * from (select * from " + view1 + " union all select * from " + view1 + ") x").getResponseCode();
     assertEquals("Checking command success", 0, ret);
 
-    driver.compile("select * from " + view2, true);
+    driver.compile("select * from " + view2);
     // view entity
     assertEquals("default@" + view2, CheckInputReadEntity.readEntities[0].getName());
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
index 0f45c81..9b50a0d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
@@ -99,7 +99,7 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
   @Test
   public void testOwnerNames() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)", true);
+    driver.compile("create table default.t1 (name string)");
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
     boolean containsDBOwnerName = false;
@@ -129,7 +129,7 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
   @Test
   public void testOwnerType() throws Exception {
     reset(mockedAuthorizer);
-    driver.compile("create table default.t1 (name string)", true);
+    driver.compile("create table default.t1 (name string)");
 
     Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
     boolean containsOwnerType = false;
@@ -164,8 +164,7 @@ public class TestHivePrivilegeObjectOwnerNameAndType {
         .checkPrivileges(any(HiveOperationType.class), inputsCapturer.capture(), outputsCapturer.capture(),
             any(HiveAuthzContext.class));
 
-    return new ImmutablePair<List<HivePrivilegeObject>, List<HivePrivilegeObject>>(
-        inputsCapturer.getValue(), outputsCapturer.getValue());
+    return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
   }
 
 }
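
A side note on the two getHivePrivilegeObjectInputs() hunks (here and in TestHiveAuthorizerCheckInvocation above): the revert swaps the explicitly typed ImmutablePair construction for the raw form, which compiles with an unchecked warning. Both build the same pair; a sketch of the equivalent, warning-free diamond form on Java 7+, shown here with String element types purely for illustration:

    import java.util.List;
    import org.apache.commons.lang3.tuple.ImmutablePair;
    import org.apache.commons.lang3.tuple.Pair;

    public class PairForms {
      static Pair<List<String>, List<String>> make(List<String> in, List<String> out) {
        // Raw form, as restored by the revert (unchecked warning):
        //   return new ImmutablePair(in, out);
        // Diamond form, equivalent and warning-free:
        return new ImmutablePair<>(in, out);
      }
    }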