Posted to commits@hive.apache.org by kg...@apache.org on 2019/12/03 09:30:33 UTC

[hive] branch master updated (c8b7648 -> 27a1524)

This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git.


    from c8b7648  HIVE-22544: Disable null sort order at user level (Krisztian Kasa, reviewed by Jesus Camacho Rodriguez)
     new 1039894  HIVE-22524: CommandProcessorException should utilize standard Exception fields (Zoltan Haindrich reviewed by Miklos Gergely)
     new 27a1524  HIVE-22521: Both Driver and SessionState has a userName (Zoltan Haindrich reviewed by László Bodor)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.


Summary of changes:
 .../java/org/apache/hadoop/hive/cli/CliDriver.java |  2 +-
 .../hive/hcatalog/cli/TestSemanticAnalysis.java    |  6 +--
 .../hive/hcatalog/pig/AbstractHCatStorerTest.java  |  4 +-
 .../TestDDLWithRemoteMetastoreSecondNamenode.java  |  2 +-
 .../ql/exec/spark/TestSmallTableCacheEviction.java |  2 +-
 .../ql/exec/spark/TestSparkSessionTimeout.java     |  2 +-
 .../hive/ql/exec/spark/TestSparkStatistics.java    |  2 +-
 .../hive/ql/parse/TestReplicationScenarios.java    |  2 +-
 .../TestReplicationScenariosAcrossInstances.java   |  4 +-
 .../hadoop/hive/ql/parse/WarehouseInstance.java    |  2 +-
 .../TestClientSideAuthorizationProvider.java       |  2 +-
 ...torageBasedClientSideAuthorizationProvider.java |  2 +-
 .../hive/schq/TestScheduledQueryIntegration.java   |  2 +-
 .../control/AbstractCoreBlobstoreCliDriver.java    |  2 +-
 .../hive/cli/control/CoreAccumuloCliDriver.java    |  2 +-
 .../hadoop/hive/cli/control/CoreCliDriver.java     |  2 +-
 .../hive/cli/control/CoreCompareCliDriver.java     |  2 +-
 .../hive/cli/control/CoreHBaseCliDriver.java       |  2 +-
 .../hadoop/hive/cli/control/CoreKuduCliDriver.java |  2 +-
 .../hadoop/hive/cli/control/CorePerfCliDriver.java |  2 +-
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java  |  2 +-
 .../hadoop/hive/llap/cli/service/package-info.java | 23 -----------
 .../hadoop/hive/llap/cli/status/package-info.java  | 23 -----------
 .../hadoop/hive/llap/metrics/package-info.java     | 23 -----------
 .../hive/llap/registry/impl/package-info.java      | 23 -----------
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  | 44 ++++++++++------------
 .../org/apache/hadoop/hive/ql/DriverContext.java   |  8 +---
 .../org/apache/hadoop/hive/ql/DriverFactory.java   |  8 ++--
 .../org/apache/hadoop/hive/ql/DriverUtils.java     |  2 +-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  8 ++--
 .../hive/ql/parse/ExplainSemanticAnalyzer.java     |  2 +-
 .../ql/processors/CommandProcessorException.java   | 21 +++--------
 .../apache/hadoop/hive/ql/reexec/ReExecDriver.java |  5 +--
 .../scheduled/ScheduledQueryExecutionService.java  |  4 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java       |  2 +-
 .../hive/ql/udf/generic/GenericUDTFGetSplits.java  |  2 +-
 .../apache/hadoop/hive/ql/TestTxnAddPartition.java |  4 +-
 .../org/apache/hadoop/hive/ql/TestTxnCommands.java | 20 +++++-----
 .../apache/hadoop/hive/ql/TestTxnCommands2.java    |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands3.java    |  2 +-
 .../apache/hadoop/hive/ql/TestTxnNoBuckets.java    |  6 +--
 .../hadoop/hive/ql/TxnCommandsBaseForTests.java    |  2 +-
 .../ql/exec/spark/TestLocalHiveSparkClient.java    |  2 +-
 .../ql/exec/spark/TestSparkInvalidFileFormat.java  |  6 +--
 .../hadoop/hive/ql/lockmgr/TestDbTxnManager2.java  | 12 +++---
 .../hadoop/hive/ql/parse/TestHiveDecimalParse.java | 28 +++++++-------
 .../hive/service/cli/operation/Operation.java      |  6 +--
 .../hive/service/cli/operation/SQLOperation.java   |  2 +-
 48 files changed, 112 insertions(+), 228 deletions(-)
 delete mode 100644 llap-server/src/test/org/apache/hadoop/hive/llap/cli/service/package-info.java
 delete mode 100644 llap-server/src/test/org/apache/hadoop/hive/llap/cli/status/package-info.java
 delete mode 100644 llap-server/src/test/org/apache/hadoop/hive/llap/metrics/package-info.java
 delete mode 100644 llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/package-info.java


[hive] 01/02: HIVE-22524: CommandProcessorException should utilize standard Exception fields (Zoltan Haindrich reviewed by Miklos Gergely)

Posted by kg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 103989416781b27f1ca9d7661844d3d6a238e8c6
Author: Zoltan Haindrich <ki...@rxd.hu>
AuthorDate: Thu Nov 28 13:29:10 2019 +0000

    HIVE-22524: CommandProcessorException should utilize standard Exception fields (Zoltan Haindrich reviewed by Miklos Gergely)
    
    Signed-off-by: Zoltan Haindrich <zh...@cloudera.com>
---
 .../java/org/apache/hadoop/hive/cli/CliDriver.java |  2 +-
 .../hive/hcatalog/cli/TestSemanticAnalysis.java    |  6 ++---
 .../hive/hcatalog/pig/AbstractHCatStorerTest.java  |  4 ++--
 .../TestDDLWithRemoteMetastoreSecondNamenode.java  |  2 +-
 .../hive/ql/parse/TestReplicationScenarios.java    |  2 +-
 .../TestReplicationScenariosAcrossInstances.java   |  4 ++--
 .../hadoop/hive/ql/parse/WarehouseInstance.java    |  2 +-
 .../TestClientSideAuthorizationProvider.java       |  2 +-
 ...torageBasedClientSideAuthorizationProvider.java |  2 +-
 .../hive/schq/TestScheduledQueryIntegration.java   |  2 +-
 .../control/AbstractCoreBlobstoreCliDriver.java    |  2 +-
 .../hive/cli/control/CoreAccumuloCliDriver.java    |  2 +-
 .../hadoop/hive/cli/control/CoreCliDriver.java     |  2 +-
 .../hive/cli/control/CoreCompareCliDriver.java     |  2 +-
 .../hive/cli/control/CoreHBaseCliDriver.java       |  2 +-
 .../hadoop/hive/cli/control/CoreKuduCliDriver.java |  2 +-
 .../hadoop/hive/cli/control/CorePerfCliDriver.java |  2 +-
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java  |  2 +-
 .../hadoop/hive/llap/cli/service/package-info.java | 23 ------------------
 .../hadoop/hive/llap/cli/status/package-info.java  | 23 ------------------
 .../hadoop/hive/llap/metrics/package-info.java     | 23 ------------------
 .../hive/llap/registry/impl/package-info.java      | 23 ------------------
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  | 18 +++++++-------
 .../hive/ql/parse/ExplainSemanticAnalyzer.java     |  2 +-
 .../ql/processors/CommandProcessorException.java   | 21 ++++------------
 .../scheduled/ScheduledQueryExecutionService.java  |  2 +-
 .../apache/hadoop/hive/ql/TestTxnAddPartition.java |  4 ++--
 .../org/apache/hadoop/hive/ql/TestTxnCommands.java | 18 +++++++-------
 .../apache/hadoop/hive/ql/TestTxnNoBuckets.java    |  6 ++---
 .../ql/exec/spark/TestSparkInvalidFileFormat.java  |  6 ++---
 .../hadoop/hive/ql/lockmgr/TestDbTxnManager2.java  |  8 +++----
 .../hadoop/hive/ql/parse/TestHiveDecimalParse.java | 28 +++++++++++-----------
 .../hive/service/cli/operation/Operation.java      |  6 ++---
 33 files changed, 76 insertions(+), 179 deletions(-)
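
Condensed, the change amounts to delegating the error message and the wrapped
throwable to the standard java.lang.Exception fields instead of duplicating
them. A minimal sketch of CommandProcessorException as it stands after this
commit, reconstructed from the diff further down (the license header, the
convenience constructors, and toString() are omitted here):

    public class CommandProcessorException extends Exception {

      private final int responseCode;
      private final int hiveErrorCode;
      private final String sqlState;

      public CommandProcessorException(int responseCode, int hiveErrorCode, String errorMessage,
          String sqlState, Throwable cause) {
        super(errorMessage, cause); // message and cause now live in Exception itself
        this.responseCode = responseCode;
        this.hiveErrorCode = hiveErrorCode;
        this.sqlState = sqlState;
      }

      public int getResponseCode() {
        return responseCode;
      }

      public int getErrorCode() {
        return hiveErrorCode;
      }

      public String getSqlState() {
        return sqlState;
      }

      // getErrorMessage() and getException() are gone; callers use the
      // inherited getMessage() and getCause() instead.
    }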

diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index c6f8946..65062ae 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -310,7 +310,7 @@ public class CliDriver {
           }
           return res;
         } catch (CommandProcessorException e) {
-          ss.out.println("Query returned non-zero code: " + e.getResponseCode() + ", cause: " + e.getErrorMessage());
+          ss.out.println("Query returned non-zero code: " + e.getResponseCode() + ", cause: " + e.getMessage());
           throw e;
         }
       }
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
index ecd6632..b5f29f5 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
@@ -189,7 +189,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
       hcatDriver.run("alter table " + TBL_NAME + " touch partition (b='12')");
       fail("Expected that the command 'alter table " + TBL_NAME + " touch partition (b='12')' would fail");
     } catch (CommandProcessorException e) {
-      assertTrue(e.getErrorMessage().contains("Specified partition does not exist"));
+      assertTrue(e.getMessage().contains("Specified partition does not exist"));
     }
 
     hcatDriver.run("drop table " + TBL_NAME);
@@ -306,7 +306,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
       assert false;
     } catch (CommandProcessorException e) {
       assertEquals(40000, e.getResponseCode());
-      assertTrue(e.getErrorMessage().contains(
+      assertTrue(e.getMessage().contains(
           "FAILED: SemanticException Operation not supported. Create table as Select is not a valid operation."));
     }
     hcatDriver.run("drop table junit_sem_analysis");
@@ -347,7 +347,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     } catch (CommandProcessorException e) {
       assertEquals(40000, e.getResponseCode());
       assertEquals("FAILED: SemanticException Operation not supported. HCatalog only supports partition columns of " +
-          "type string. For column: b Found type: int", e.getErrorMessage());
+          "type string. For column: b Found type: int", e.getMessage());
     }
   }
 
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index f37b216..f7d668b 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -302,7 +302,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
     try {
       driver.run("select * from " + tblName);
     } catch (CommandProcessorException e) {
-      LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getErrorMessage() + " for table " + tblName);
+      LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getMessage() + " for table " + tblName);
     }
     List l = new ArrayList();
     driver.getResults(l);
@@ -370,7 +370,7 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
     try {
       driver.run("select * from " + tblName);
     } catch (CommandProcessorException e) {
-      LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getErrorMessage());
+      LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getMessage());
     }
     List l = new ArrayList();
     driver.getResults(l);
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
index ed12028..bbc6ae4 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java
@@ -163,7 +163,7 @@ public class TestDDLWithRemoteMetastoreSecondNamenode {
       assertNotNull("driver.run() was expected to return result for query: " + query, result);
     } catch (CommandProcessorException e) {
       throw new RuntimeException("Execution of (" + query + ") failed with exit status: " +
-          e.getResponseCode() + ", " + e.getErrorMessage() + ", query: " + query);
+          e.getResponseCode() + ", " + e.getMessage() + ", query: " + query);
     }
   }
 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 823fabb..0d64780 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -3655,7 +3655,7 @@ public class TestReplicationScenarios {
       myDriver.run(cmd);
       success = true;
     } catch (CommandProcessorException e) {
-      LOG.warn("Error {} : {} running [{}].", e.getErrorCode(), e.getErrorMessage(), cmd);
+      LOG.warn("Error {} : {} running [{}].", e.getErrorCode(), e.getMessage(), cmd);
     }
     return success;
   }
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 8df5fc8..fd4f2dc 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -995,7 +995,7 @@ public class TestReplicationScenariosAcrossInstances extends BaseReplicationAcro
       replica.runCommand("REPL LOAD someJunkDB from '" + tuple.dumpLocation + "'");
       assert false;
     } catch (CommandProcessorException e) {
-      assertTrue(e.getErrorMessage().toLowerCase().contains(
+      assertTrue(e.getMessage().toLowerCase().contains(
           "org.apache.hadoop.hive.ql.ddl.DDLTask. Database does not exist: someJunkDB".toLowerCase()));
     }
 
@@ -1004,7 +1004,7 @@ public class TestReplicationScenariosAcrossInstances extends BaseReplicationAcro
     try {
       replica.runCommand("REPL LOAD someJunkDB from '" + tuple.dumpLocation+"'");
     } catch (CommandProcessorException e) {
-      assertTrue(e.getErrorMessage().toLowerCase().contains("semanticException no data to load in path".toLowerCase()));
+      assertTrue(e.getMessage().toLowerCase().contains("semanticException no data to load in path".toLowerCase()));
     }
 
     primary.run(" drop database if exists " + testDbName + " cascade");
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
index c141441..897a401 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
@@ -232,7 +232,7 @@ public class WarehouseInstance implements Closeable {
       driver.run(command);
       return this;
     } catch (CommandProcessorException e) {
-      throw e.getException();
+      throw e.getCause();
     }
   }
 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
index 43e1624..948ab4d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
@@ -237,7 +237,7 @@ public class TestClientSideAuthorizationProvider {
   protected void assertNoPrivileges(CommandProcessorException ret){
     assertNotNull(ret);
     assertFalse(0 == ret.getResponseCode());
-    assertTrue(ret.getErrorMessage().indexOf("No privilege") != -1);
+    assertTrue(ret.getMessage().indexOf("No privilege") != -1);
   }
 
 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java
index 5cce4a5..839a32c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedClientSideAuthorizationProvider.java
@@ -94,7 +94,7 @@ public class TestStorageBasedClientSideAuthorizationProvider extends
   protected void assertNoPrivileges(CommandProcessorException e){
     assertNotNull(e);
     assertFalse(0 == e.getResponseCode());
-    assertTrue(e.getErrorMessage().indexOf("AccessControlException") != -1);
+    assertTrue(e.getMessage().indexOf("AccessControlException") != -1);
   }
 
 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/schq/TestScheduledQueryIntegration.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/schq/TestScheduledQueryIntegration.java
index 9d1ca5b..1c0054d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/schq/TestScheduledQueryIntegration.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/schq/TestScheduledQueryIntegration.java
@@ -95,7 +95,7 @@ public class TestScheduledQueryIntegration {
       runAsUser("user2", "drop table t1");
       fail("Exception expected");
     } catch (CommandProcessorException cpe) {
-      assertThat(cpe.getErrorMessage(), Matchers.containsString("HiveAccessControlException Permission denied"));
+      assertThat(cpe.getMessage(), Matchers.containsString("HiveAccessControlException Permission denied"));
     }
     runAsUser("user1", "drop table t1");
   }
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
index 3c0ba14..447d42d 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
@@ -156,7 +156,7 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
         }
       } catch (CommandProcessorException e) {
         if (expectSuccess) {
-          qt.failedQuery(e.getException(), e.getResponseCode(), fname, debugHint);
+          qt.failedQuery(e.getCause(), e.getResponseCode(), fname, debugHint);
         }
       }
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
index e6c5e70..31f5cfa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
@@ -110,7 +110,7 @@ public class CoreAccumuloCliDriver extends CliAdapter {
       try {
         qt.executeClient(fname);
       } catch (CommandProcessorException e) {
-        qt.failedQuery(e.getException(), e.getResponseCode(), fname, null);
+        qt.failedQuery(e.getCause(), e.getResponseCode(), fname, null);
       }
 
       QTestProcessExecResult result = qt.checkCliDriverResults(fname);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index 7a90309..171e2d9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -169,7 +169,7 @@ public class CoreCliDriver extends CliAdapter {
         qt.executeClient(fname);
       } catch (CommandProcessorException e) {
         failed = true;
-        qt.failedQuery(e.getException(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
+        qt.failedQuery(e.getCause(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
       }
 
       setupAdditionalPartialMasks();
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index 9a519ff..0900c92 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -147,7 +147,7 @@ public class CoreCompareCliDriver extends CliAdapter{
         try {
           qt.executeClient(versionFile, fname);
         } catch (CommandProcessorException e) {
-          qt.failedQuery(e.getException(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
+          qt.failedQuery(e.getCause(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
         }
       }
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
index 5cad1e2..9350b82 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
@@ -116,7 +116,7 @@ public class CoreHBaseCliDriver extends CliAdapter {
       try {
         qt.executeClient(fname);
       } catch (CommandProcessorException e) {
-        qt.failedQuery(e.getException(), e.getResponseCode(), fname, null);
+        qt.failedQuery(e.getCause(), e.getResponseCode(), fname, null);
       }
 
       QTestProcessExecResult result = qt.checkCliDriverResults(fname);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
index ab9c36f..8f54b52 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
@@ -118,7 +118,7 @@ public class CoreKuduCliDriver extends CliAdapter {
       try {
         qt.executeClient(fname);
       } catch (CommandProcessorException e) {
-        qt.failedQuery(e.getException(), e.getResponseCode(), fname, null);
+        qt.failedQuery(e.getCause(), e.getResponseCode(), fname, null);
       }
 
       QTestProcessExecResult result = qt.checkCliDriverResults(fname);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
index 06d1e16..d7faaf6 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
@@ -139,7 +139,7 @@ public class CorePerfCliDriver extends CliAdapter {
       try {
         qt.executeClient(fname);
       } catch (CommandProcessorException e) {
-        qt.failedQuery(e.getException(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
+        qt.failedQuery(e.getCause(), e.getResponseCode(), fname, QTestUtil.DEBUG_HINT);
       }
 
       QTestProcessExecResult result = qt.checkCliDriverResults(fname);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 51d3672..024dd82 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -762,7 +762,7 @@ public class QTestUtil {
           return response;
         } catch (CommandProcessorException e) {
           SessionState.getConsole().printError(e.toString(),
-                  e.getException() != null ? Throwables.getStackTraceAsString(e.getException()) : "");
+                  e.getCause() != null ? Throwables.getStackTraceAsString(e.getCause()) : "");
           throw e;
         }
       } else {
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cli/service/package-info.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cli/service/package-info.java
deleted file mode 100644
index e8746d2..0000000
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cli/service/package-info.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Package consisting the tests for the program LlapServiceDriver and other classes used by it.
- */
-package org.apache.hadoop.hive.llap.cli.service;
-
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cli/status/package-info.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cli/status/package-info.java
deleted file mode 100644
index 9af5dd8..0000000
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/cli/status/package-info.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Package consisting the tests for the program LlapStatusServiceDriver and other classes used by it.
- */
-package org.apache.hadoop.hive.llap.cli.status;
-
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/metrics/package-info.java b/llap-server/src/test/org/apache/hadoop/hive/llap/metrics/package-info.java
deleted file mode 100644
index 4902515..0000000
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/metrics/package-info.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Test classes for metrics package.
- */
-
-package org.apache.hadoop.hive.llap.metrics;
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/package-info.java b/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/package-info.java
deleted file mode 100644
index d2acc5d..0000000
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/registry/impl/package-info.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Test classes for registry implementations.
- */
-
-package org.apache.hadoop.hive.llap.registry.impl;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 54d12ba..1574982 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1188,8 +1188,8 @@ public class Driver implements IDriver {
        * the error is a specific/expected one.
        * It's written to stdout for backward compatibility (WebHCat consumes it).*/
       try {
-        if (cpe.getException() == null) {
-          mdf.error(ss.out, cpe.getErrorMessage(), cpe.getResponseCode(), cpe.getSqlState());
+        if (cpe.getCause() == null) {
+          mdf.error(ss.out, cpe.getMessage(), cpe.getResponseCode(), cpe.getSqlState());
           throw cpe;
         }
         ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpe.getResponseCode());
@@ -1199,17 +1199,17 @@ public class Driver implements IDriver {
             (e.g. #compile()) to find an appropriate canonical error and
             return its code as error code. In this case we want to
             preserve it for downstream code to interpret*/
-          mdf.error(ss.out, cpe.getErrorMessage(), cpe.getResponseCode(), cpe.getSqlState(), null);
+          mdf.error(ss.out, cpe.getMessage(), cpe.getResponseCode(), cpe.getSqlState(), null);
           throw cpe;
         }
-        if (cpe.getException() instanceof HiveException) {
-          HiveException rc = (HiveException)cpe.getException();
-          mdf.error(ss.out, cpe.getErrorMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpe.getSqlState(),
+        if (cpe.getCause() instanceof HiveException) {
+          HiveException rc = (HiveException)cpe.getCause();
+          mdf.error(ss.out, cpe.getMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpe.getSqlState(),
               rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? StringUtils.stringifyException(rc) : null);
         } else {
-          ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpe.getException().getMessage());
-          mdf.error(ss.out, cpe.getErrorMessage(), canonicalMsg.getErrorCode(), cpe.getSqlState(),
-              StringUtils.stringifyException(cpe.getException()));
+          ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpe.getCause().getMessage());
+          mdf.error(ss.out, cpe.getMessage(), canonicalMsg.getErrorCode(), cpe.getSqlState(),
+              StringUtils.stringifyException(cpe.getCause()));
         }
       } catch (HiveException ex) {
         CONSOLE.printError("Unable to JSON-encode the error", StringUtils.stringifyException(ex));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index dd49e3a..df6e5e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -152,7 +152,7 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
           while (driver.getResults(new ArrayList<String>())) {
           }
         } catch (CommandProcessorException e) {
-          throw new SemanticException(e.getErrorMessage(), e);
+          throw new SemanticException(e.getMessage(), e);
         }
         config.setOpIdToRuntimeNumRows(aggregateStats(config.getExplainRootPath()));
       } catch (IOException e1) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java
index 3679996..37fbc20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java
@@ -26,9 +26,7 @@ public class CommandProcessorException extends Exception {
 
   private final int responseCode;
   private final int hiveErrorCode;
-  private final String errorMessage;
   private final String sqlState;
-  private final Throwable exception;
 
   public CommandProcessorException(int responseCode) {
     this(responseCode, -1, null, null, null);
@@ -48,11 +46,10 @@ public class CommandProcessorException extends Exception {
 
   public CommandProcessorException(int responseCode, int hiveErrorCode, String errorMessage, String sqlState,
       Throwable exception) {
+    super(errorMessage, exception);
     this.responseCode = responseCode;
     this.hiveErrorCode = hiveErrorCode;
-    this.errorMessage = errorMessage;
     this.sqlState = sqlState;
-    this.exception = exception;
   }
 
   public int getResponseCode() {
@@ -63,23 +60,15 @@ public class CommandProcessorException extends Exception {
     return hiveErrorCode;
   }
 
-  public String getErrorMessage() {
-    return errorMessage;
-  }
-
   public String getSqlState() {
     return sqlState;
   }
 
-  public Throwable getException() {
-    return exception;
-  }
-
   @Override
   public String toString() {
-    return "(responseCode = " + responseCode + ", errorMessage = " + errorMessage + ", " +
-      (hiveErrorCode > 0 ? "hiveErrorCode = " + hiveErrorCode + ", " : "") +
-      "SQLState = " + sqlState +
-      (exception == null ? "" : ", exception = " + exception.getMessage()) + ")";
+    return "(responseCode = " + responseCode + ", errorMessage = " + getMessage() + ", "
+        + (hiveErrorCode > 0 ? "hiveErrorCode = " + hiveErrorCode + ", " : "") + "SQLState = " + sqlState
+        + (getCause() == null ? "" : ", exception = " + getCause().getMessage()) + ")";
   }
+
 }
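
The caller-side migration is mechanical, as the hunks above illustrate. A
hypothetical call site (LOG stands in for the caller's logger; driver and
command for whatever the real caller already has in scope) moves from the
removed accessors to the inherited Throwable ones:

    void runOrRethrow(IDriver driver, String command) throws HiveException {
      try {
        driver.run(command);
      } catch (CommandProcessorException e) {
        // e.getErrorMessage() becomes e.getMessage(); e.getException() becomes e.getCause()
        LOG.warn("Error {} : {} running [{}].", e.getErrorCode(), e.getMessage(), command);
        if (e.getCause() instanceof HiveException) {
          // the wrapped cause still carries the canonical error information
          throw (HiveException) e.getCause();
        }
      }
    }
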
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
index 551d1f8..5dd105f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
@@ -145,7 +145,7 @@ public class ScheduledQueryExecutionService implements Closeable {
     private String getErrorStringForException(Throwable t) {
       if (t instanceof CommandProcessorException) {
         CommandProcessorException cpr = (CommandProcessorException) t;
-        return String.format("%s", cpr.getErrorMessage());
+        return String.format("%s", cpr.getMessage());
       } else {
         return String.format("%s: %s", t.getClass().getName(), t.getMessage());
       }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnAddPartition.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnAddPartition.java
index 0edc912..c9cb669 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnAddPartition.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnAddPartition.java
@@ -117,7 +117,7 @@ public class TestTxnAddPartition extends TxnCommandsBaseForTests {
     CommandProcessorException e =
         runStatementOnDriverNegative("ALTER TABLE T ADD PARTITION (p=0) location '" + getWarehouseDir() + "/3/data'");
     Assert.assertTrue("add existing partition",
-        e.getErrorMessage() != null && e.getErrorMessage().contains("Partition already exists"));
+        e.getMessage() != null && e.getMessage().contains("Partition already exists"));
 
     //should be no-op since p=3 exists
     String stmt = "ALTER TABLE T ADD IF NOT EXISTS " +
@@ -191,7 +191,7 @@ public class TestTxnAddPartition extends TxnCommandsBaseForTests {
     CommandProcessorException e =
         runStatementOnDriverNegative("ALTER TABLE T ADD PARTITION (p=0) location '" + getWarehouseDir() + "/3/data'");
     Assert.assertTrue("add existing partition",
-        e.getErrorMessage() != null && e.getErrorMessage().contains("Partition already exists"));
+        e.getMessage() != null && e.getMessage().contains("Partition already exists"));
 
     //should be no-op since p=3 exists
     runStatementOnDriver("ALTER TABLE T ADD IF NOT EXISTS " +
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 1da01be..6e7b201 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -533,7 +533,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
     CommandProcessorException e2 = runStatementOnDriverNegative("update " + Table.ACIDTBL + " set a = 1 where b != 1");
     Assert.assertEquals("Expected update of bucket column to fail",
         "FAILED: SemanticException [Error 10302]: Updating values of bucketing columns is not supported.  Column a.",
-        e2.getErrorMessage());
+        e2.getMessage());
     Assert.assertEquals("Expected update of bucket column to fail",
         ErrorMsg.UPDATE_CANNOT_UPDATE_BUCKET_VALUE.getErrorCode(), e2.getErrorCode());
     CommandProcessorException e3 = runStatementOnDriverNegative("commit"); //not allowed in w/o tx
@@ -579,7 +579,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
     CommandProcessorException e = runStatementOnDriverNegative("select * from no_such_table");
     Assert.assertEquals("Txn didn't fail?",
         "FAILED: SemanticException [Error 10001]: Line 1:14 Table not found 'no_such_table'",
-        e.getErrorMessage());
+        e.getMessage());
     runStatementOnDriver("start transaction");
     List<String> rs1 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
     runStatementOnDriver("commit");
@@ -732,8 +732,8 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
     houseKeeperService.run();
     //this should fail because txn aborted due to timeout
     CommandProcessorException e = runStatementOnDriverNegative("delete from " + Table.ACIDTBL + " where a = 5");
-    Assert.assertTrue("Actual: " + e.getErrorMessage(),
-        e.getErrorMessage().contains("Transaction manager has aborted the transaction txnid:1"));
+    Assert.assertTrue("Actual: " + e.getMessage(),
+        e.getMessage().contains("Transaction manager has aborted the transaction txnid:1"));
 
     //now test that we don't timeout locks we should not
     //heartbeater should be running in the background every 1/2 second
@@ -819,7 +819,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
         "WHEN MATCHED THEN UPDATE set b = 1\n" +
         "WHEN MATCHED THEN DELETE\n" +
         "WHEN NOT MATCHED AND a < 1 THEN INSERT VALUES(1,2)");
-    Assert.assertEquals(ErrorMsg.MERGE_PREDIACTE_REQUIRED, ((HiveException)e.getException()).getCanonicalErrorMsg());
+    Assert.assertEquals(ErrorMsg.MERGE_PREDIACTE_REQUIRED, ((HiveException)e.getCause()).getCanonicalErrorMsg());
   }
   @Test
   public void testMergeNegative2() throws Exception {
@@ -828,7 +828,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
         " target USING " + Table.NONACIDORCTBL + "\n source ON target.pk = source.pk " +
         "\nWHEN MATCHED THEN UPDATE set b = 1 " +
         "\nWHEN MATCHED THEN UPDATE set b=a");
-    Assert.assertEquals(ErrorMsg.MERGE_TOO_MANY_UPDATE, ((HiveException)e.getException()).getCanonicalErrorMsg());
+    Assert.assertEquals(ErrorMsg.MERGE_TOO_MANY_UPDATE, ((HiveException)e.getCause()).getCanonicalErrorMsg());
   }
 
   /**
@@ -1097,11 +1097,11 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
         " source ON target.a = source.a\n" +
         "WHEN MATCHED THEN UPDATE set t = 1");
     Assert.assertEquals(ErrorMsg.INVALID_TARGET_COLUMN_IN_SET_CLAUSE,
-        ((HiveException)e1.getException()).getCanonicalErrorMsg());
+        ((HiveException)e1.getCause()).getCanonicalErrorMsg());
 
     CommandProcessorException e2 = runStatementOnDriverNegative("update " + Table.ACIDTBL + " set t = 1");
     Assert.assertEquals(ErrorMsg.INVALID_TARGET_COLUMN_IN_SET_CLAUSE,
-        ((HiveException)e2.getException()).getCanonicalErrorMsg());
+        ((HiveException)e2.getCause()).getCanonicalErrorMsg());
   }
 
   @Test
@@ -1112,7 +1112,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
             "using (select *\n" +
             "       from " + Table.NONACIDORCTBL + " src) sub on sub.a = target.a\n" +
             "when not matched then insert values (sub.a,sub.b)");
-    Assert.assertTrue("Error didn't match: " + e, e.getErrorMessage().contains(
+    Assert.assertTrue("Error didn't match: " + e, e.getMessage().contains(
         "No columns from target table 'trgt' found in ON clause '`sub`.`a` = `target`.`a`' of MERGE statement."));
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
index 4a8be40..ea6b1d9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnNoBuckets.java
@@ -356,7 +356,7 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree /Users/ekoifman/dev/hiver
             TxnCommandsBaseForTests.Table.ACIDTBL +
             " where a between 5 and 7 union all select a, b from " +
             TxnCommandsBaseForTests.Table.ACIDTBL + " where a >= 9");
-    Assert.assertTrue("", e.getErrorMessage().contains("not supported due to OVERWRITE and UNION ALL"));
+    Assert.assertTrue("", e.getMessage().contains("not supported due to OVERWRITE and UNION ALL"));
   }
   /**
    * The idea here is to create a non acid table that was written by multiple writers, i.e.
@@ -632,7 +632,7 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree /Users/ekoifman/dev/hiver
       "select a, b from " + Table.NONACIDORCTBL);
     ErrorMsg.CTAS_PARCOL_COEXISTENCE.getErrorCode(); //this code doesn't propagate
 //    Assert.assertEquals("Wrong msg", ErrorMsg.CTAS_PARCOL_COEXISTENCE.getErrorCode(), cpr.getErrorCode());
-    Assert.assertTrue(e.getErrorMessage().contains("CREATE-TABLE-AS-SELECT does not support"));
+    Assert.assertTrue(e.getMessage().contains("CREATE-TABLE-AS-SELECT does not support"));
   }
   /**
    * Currently CTAS doesn't support partitioned tables.  Correspondingly Acid only supports CTAS for
@@ -646,7 +646,7 @@ ekoifman:apache-hive-3.0.0-SNAPSHOT-bin ekoifman$ tree /Users/ekoifman/dev/hiver
         "by (b int) stored as " +
         "ORC TBLPROPERTIES ('transactional'='true') as select a, b from " + Table.NONACIDORCTBL);
     ErrorMsg.CTAS_PARCOL_COEXISTENCE.getErrorCode(); //this code doesn't propagate
-    Assert.assertTrue(e.getErrorMessage().contains("CREATE-TABLE-AS-SELECT does not support " +
+    Assert.assertTrue(e.getMessage().contains("CREATE-TABLE-AS-SELECT does not support " +
         "partitioning in the target table"));
   }
   /**
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkInvalidFileFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkInvalidFileFormat.java
index 3a66276..0889121 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkInvalidFileFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestSparkInvalidFileFormat.java
@@ -64,9 +64,9 @@ public class TestSparkInvalidFileFormat {
         driver.run("SELECT * FROM test_table ORDER BY key LIMIT 10");
         assert false;
       } catch (CommandProcessorException e) {
-        Assert.assertTrue(e.getException() instanceof HiveException);
-        Assert.assertTrue(e.getException().getMessage().contains("Spark job failed due to task failures"));
-        Assert.assertTrue(e.getException().getMessage().contains("kv1.txt is not a Parquet file. expected " +
+        Assert.assertTrue(e.getCause() instanceof HiveException);
+        Assert.assertTrue(e.getCause().getMessage().contains("Spark job failed due to task failures"));
+        Assert.assertTrue(e.getCause().getMessage().contains("kv1.txt is not a Parquet file. expected " +
               "magic number at tail [80, 65, 82, 49] but found [95, 57, 55, 10]"));
       }
     } finally {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
index 8f111b9..f9bbb28 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
@@ -397,7 +397,7 @@ public class TestDbTxnManager2 {
       assert false;
     } catch (CommandProcessorException e) {
       Assert.assertEquals(ErrorMsg.TXNMGR_NOT_ACID.getErrorCode(), e.getResponseCode());
-      Assert.assertTrue(e.getErrorMessage().contains("This command is not allowed on an ACID table"));
+      Assert.assertTrue(e.getMessage().contains("This command is not allowed on an ACID table"));
     }
 
     useDummyTxnManagerTemporarily(conf);
@@ -405,7 +405,7 @@ public class TestDbTxnManager2 {
       driver.compileAndRespond("insert into table T10 values (1, 2)", true);
     } catch (CommandProcessorException e) {
       Assert.assertEquals(ErrorMsg.TXNMGR_NOT_ACID.getErrorCode(), e.getResponseCode());
-      Assert.assertTrue(e.getErrorMessage().contains("This command is not allowed on an ACID table"));
+      Assert.assertTrue(e.getMessage().contains("This command is not allowed on an ACID table"));
     }
 
     useDummyTxnManagerTemporarily(conf);
@@ -413,7 +413,7 @@ public class TestDbTxnManager2 {
       driver.compileAndRespond("update T10 set a=0 where b=1", true);
     } catch (CommandProcessorException e) {
       Assert.assertEquals(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getErrorCode(), e.getResponseCode());
-      Assert.assertTrue(e.getErrorMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
+      Assert.assertTrue(e.getMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
     }
 
     useDummyTxnManagerTemporarily(conf);
@@ -421,7 +421,7 @@ public class TestDbTxnManager2 {
       driver.compileAndRespond("delete from T10", true);
     } catch (CommandProcessorException e) {
       Assert.assertEquals(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getErrorCode(), e.getResponseCode());
-      Assert.assertTrue(e.getErrorMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
+      Assert.assertTrue(e.getMessage().contains("Attempt to do update or delete using transaction manager that does not support these operations."));
     }
 
     conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 448532b..7d96e63 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -62,8 +62,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("Decimal precision out of allowed range [1,38]"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -78,8 +78,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("Decimal precision out of allowed range [1,38]"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -94,8 +94,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("Decimal scale must be less than or equal to precision"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -110,8 +110,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("extraneous input '-' expecting Number"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("extraneous input '-' expecting Number"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -126,8 +126,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("missing ) at ',' near ',' in column name or constraint"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("missing ) at ',' near ',' in column name or constraint"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -142,8 +142,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("mismatched input '7a' expecting Number near '('"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("mismatched input '7a' expecting Number near '('"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
@@ -158,8 +158,8 @@ public class TestHiveDecimalParse {
       driver.compile(query, true, false);
     } catch (CommandProcessorException cpe) {
       Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
-      Assert.assertTrue(cpe.getErrorMessage(),
-          cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision"));
+      Assert.assertTrue(cpe.getMessage(),
+          cpe.getMessage().contains("Decimal scale must be less than or equal to precision"));
       return;
     }
     Assert.assertTrue("Expected to receive an exception", false);
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 5036d59..af5c97e 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -353,9 +353,9 @@ public abstract class Operation {
 
   protected HiveSQLException toSQLException(String prefix, CommandProcessorException e) {
     HiveSQLException ex =
-        new HiveSQLException(prefix + ": " + e.getErrorMessage(), e.getSqlState(), e.getResponseCode());
-    if (e.getException() != null) {
-      ex.initCause(e.getException());
+        new HiveSQLException(prefix + ": " + e.getMessage(), e.getSqlState(), e.getResponseCode());
+    if (e.getCause() != null) {
+      ex.initCause(e.getCause());
     }
     return ex;
   }


[hive] 02/02: HIVE-22521: Both Driver and SessionState has a userName (Zoltan Haindrich reviewed by László Bodor)

Posted by kg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 27a15244a7d7d8ba7c09f580d2b862ca9419057f
Author: Zoltan Haindrich <ki...@rxd.hu>
AuthorDate: Thu Nov 28 14:01:22 2019 +0000

    HIVE-22521: Both Driver and SessionState has a userName (Zoltan Haindrich reviewed by László Bodor)
    
    Signed-off-by: Zoltan Haindrich <zh...@cloudera.com>
---
 .../ql/exec/spark/TestSmallTableCacheEviction.java |  2 +-
 .../ql/exec/spark/TestSparkSessionTimeout.java     |  2 +-
 .../hive/ql/exec/spark/TestSparkStatistics.java    |  2 +-
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java  | 26 +++++++++-------------
 .../org/apache/hadoop/hive/ql/DriverContext.java   |  8 +------
 .../org/apache/hadoop/hive/ql/DriverFactory.java   |  8 +++----
 .../org/apache/hadoop/hive/ql/DriverUtils.java     |  2 +-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  8 +++----
 .../apache/hadoop/hive/ql/reexec/ReExecDriver.java |  5 ++---
 .../scheduled/ScheduledQueryExecutionService.java  |  2 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java       |  2 +-
 .../hive/ql/udf/generic/GenericUDTFGetSplits.java  |  2 +-
 .../org/apache/hadoop/hive/ql/TestTxnCommands.java |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java    |  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands3.java    |  2 +-
 .../hadoop/hive/ql/TxnCommandsBaseForTests.java    |  2 +-
 .../ql/exec/spark/TestLocalHiveSparkClient.java    |  2 +-
 .../hadoop/hive/ql/lockmgr/TestDbTxnManager2.java  |  4 ++--
 .../hive/service/cli/operation/SQLOperation.java   |  2 +-
 19 files changed, 36 insertions(+), 49 deletions(-)
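
The visible effect on callers is that the userName argument disappears from
every Driver constructor; the user is read from the ambient SessionState
instead (see the hookCtx.setUserName(SessionState.get().getUserName()) hunk in
the Driver.java diff below). A minimal sketch of how a call site changes,
modeled on the test diffs below (conf stands in for an already-configured
HiveConf):

    private Driver createDriver(HiveConf conf) {
      QueryState queryState = new QueryState.Builder()
          .withGenerateNewQueryId(true)
          .withHiveConf(conf)
          .build();
      // before HIVE-22521: new Driver(queryState, userName, queryInfo)
      return new Driver(queryState, null); // second argument is the QueryInfo
    }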

diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSmallTableCacheEviction.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSmallTableCacheEviction.java
index 7bdb5e5..bf7f3dc 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSmallTableCacheEviction.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSmallTableCacheEviction.java
@@ -104,7 +104,7 @@ public class TestSmallTableCacheEviction {
     return new Driver(new QueryState.Builder()
             .withGenerateNewQueryId(true)
             .withHiveConf(conf).build(),
-            null, null);
+            null);
   }
 
   /**
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkSessionTimeout.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkSessionTimeout.java
index 51bd8a7..842aa9f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkSessionTimeout.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkSessionTimeout.java
@@ -111,7 +111,7 @@ public class TestSparkSessionTimeout {
       driver = new Driver(new QueryState.Builder()
               .withGenerateNewQueryId(true)
               .withHiveConf(conf).build(),
-              null, null);
+              null);
 
       SparkSession sparkSession = SparkUtilities.getSparkSession(conf, SparkSessionManagerImpl
               .getInstance());
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
index 2b72d09..9b433a2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/exec/spark/TestSparkStatistics.java
@@ -58,7 +58,7 @@ public class TestSparkStatistics {
       driver = new Driver(new QueryState.Builder()
               .withGenerateNewQueryId(true)
               .withHiveConf(conf).build(),
-              null, null);
+              null);
 
       driver.run("create table test (col int)");
       Assert.assertEquals(0, driver.compile("select * from test order by col", true));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 1574982..a28bf16 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -235,32 +235,26 @@ public class Driver implements IDriver {
 
   @VisibleForTesting
   public Driver(HiveConf conf) {
-    this(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build(), null);
+    this(new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build());
   }
 
   // Pass lineageState when a driver instantiates another Driver to run
   // or compile another query
   public Driver(HiveConf conf, Context ctx, LineageState lineageState) {
-    this(getNewQueryState(conf, lineageState), null, null);
+    this(getNewQueryState(conf, lineageState), null);
     this.ctx = ctx;
   }
 
-  // Pass lineageState when a driver instantiates another Driver to run
-  // or compile another query
-  public Driver(HiveConf conf, String userName, LineageState lineageState) {
-    this(getNewQueryState(conf, lineageState), userName, null);
-  }
-
-  public Driver(QueryState queryState, String userName) {
-    this(queryState, userName, null, null);
+  public Driver(QueryState queryState) {
+    this(queryState, null, null);
   }
 
-  public Driver(QueryState queryState, String userName, QueryInfo queryInfo) {
-    this(queryState, userName, queryInfo, null);
+  public Driver(QueryState queryState, QueryInfo queryInfo) {
+    this(queryState, queryInfo, null);
   }
 
-  public Driver(QueryState queryState, String userName, QueryInfo queryInfo, HiveTxnManager txnManager) {
-    driverContext = new DriverContext(queryState, queryInfo, userName, new HookRunner(queryState.getConf(), CONSOLE),
+  public Driver(QueryState queryState, QueryInfo queryInfo, HiveTxnManager txnManager) {
+    driverContext = new DriverContext(queryState, queryInfo, new HookRunner(queryState.getConf(), CONSOLE),
         txnManager);
   }
 
@@ -270,7 +264,7 @@ public class Driver implements IDriver {
    * @param lineageState a LineageState to be set in the new QueryState object
    * @return The new QueryState object
    */
-  private static QueryState getNewQueryState(HiveConf conf, LineageState lineageState) {
+  public static QueryState getNewQueryState(HiveConf conf, LineageState lineageState) {
     return new QueryState.Builder()
         .withGenerateNewQueryId(true)
         .withHiveConf(conf)
@@ -409,7 +403,7 @@ public class Driver implements IDriver {
       HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
       if (executeHooks) {
         hookCtx.setConf(driverContext.getConf());
-        hookCtx.setUserName(driverContext.getUserName());
+        hookCtx.setUserName(SessionState.get().getUserName());
         hookCtx.setIpAddress(SessionState.get().getUserIpAddress());
         hookCtx.setCommand(command);
         hookCtx.setHiveOperation(driverContext.getQueryState().getHiveOperation());
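
With getNewQueryState(HiveConf, LineageState) promoted to public above, callers
that used the removed Driver(conf, userName, lineageState) constructor can build
the QueryState themselves and hand it to the surviving constructor. A sketch
under that assumption (parentQueryState is illustrative):

    // share the parent query's LineageState, as the removed constructor did
    QueryState qs = Driver.getNewQueryState(conf, parentQueryState.getLineageState());
    Driver child = new Driver(qs);   // user name now comes from SessionState
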
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
index 1afcfc8..13e2f29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
@@ -38,7 +38,6 @@ public class DriverContext {
   private final QueryState queryState;
   private final QueryInfo queryInfo;
   private final HiveConf conf;
-  private final String userName;
   private final HookRunner hookRunner;
 
   // Transaction manager the Driver has been initialized with (can be null).
@@ -72,12 +71,11 @@ public class DriverContext {
   private Context backupContext = null;
   private boolean retrial = false;
 
-  public DriverContext(QueryState queryState, QueryInfo queryInfo, String userName, HookRunner hookRunner,
+  public DriverContext(QueryState queryState, QueryInfo queryInfo, HookRunner hookRunner,
       HiveTxnManager initTxnManager) {
     this.queryState = queryState;
     this.queryInfo = queryInfo;
     this.conf = queryState.getConf();
-    this.userName = userName;
     this.hookRunner = hookRunner;
     this.initTxnManager = initTxnManager;
   }
@@ -98,10 +96,6 @@ public class DriverContext {
     return conf;
   }
 
-  public String getUserName() {
-    return userName;
-  }
-
   public HookRunner getHookRunner() {
     return hookRunner;
   }
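
With the field gone, code that read driverContext.getUserName() has to consult
the session instead. One caveat worth noting: SessionState.get() is thread-local
and may be null on threads without a started session (the new
hookCtx.setUserName(SessionState.get().getUserName()) call in Driver.java above
assumes a session exists), so a defensive reading mirrors the null check this
patch removes from DDLSemanticAnalyzer (below):

    SessionState ss = SessionState.get();
    String userName = (ss == null) ? null : ss.getUserName();
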
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
index 963741c..9cca2a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverFactory.java
@@ -35,13 +35,13 @@ import com.google.common.base.Strings;
 public class DriverFactory {
 
   public static IDriver newDriver(HiveConf conf) {
-    return newDriver(getNewQueryState(conf), null, null);
+    return newDriver(getNewQueryState(conf), null);
   }
 
-  public static IDriver newDriver(QueryState queryState, String userName, QueryInfo queryInfo) {
+  public static IDriver newDriver(QueryState queryState, QueryInfo queryInfo) {
     boolean enabled = queryState.getConf().getBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED);
     if (!enabled) {
-      return new Driver(queryState, userName, queryInfo);
+      return new Driver(queryState, queryInfo);
     }
 
     String strategies = queryState.getConf().getVar(ConfVars.HIVE_QUERY_REEXECUTION_STRATEGIES);
@@ -54,7 +54,7 @@ public class DriverFactory {
       plugins.add(buildReExecPlugin(string));
     }
 
-    return new ReExecDriver(queryState, userName, queryInfo, plugins);
+    return new ReExecDriver(queryState, queryInfo, plugins);
   }
 
   private static IReExecutionPlugin buildReExecPlugin(String name) throws RuntimeException {
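
The factory decision itself is unchanged: with hive.query.reexecution.enabled
set, callers get a ReExecDriver wrapping a core Driver, otherwise a plain
Driver; only the userName parameter disappears from both paths. An illustrative
use:

    HiveConf conf = new HiveConf();
    conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED, true);
    IDriver driver = DriverFactory.newDriver(conf);  // a ReExecDriver under the hood
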
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverUtils.java
index 26e904a..aa8e64d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverUtils.java
@@ -48,7 +48,7 @@ public class DriverUtils {
     boolean isOk = false;
     try {
       QueryState qs = new QueryState.Builder().withHiveConf(conf).withGenerateNewQueryId(true).nonIsolated().build();
-      Driver driver = new Driver(qs, user, null, null);
+      Driver driver = new Driver(qs, null, null);
       driver.setCompactionWriteIds(writeIds, compactorTxnId);
       try {
         try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 5ebf719..497b17f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2341,10 +2341,10 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         }
         cmd.append(")");
       }
-      SessionState ss = SessionState.get();
-      // TODO: should this use getUserFromAuthenticator?
-      String uName = (ss == null? null: ss.getUserName());
-      Driver driver = new Driver(conf, uName, queryState.getLineageState());
+      // FIXME: is it ok to have a completely new querystate?
+      QueryState newQueryState = Driver.getNewQueryState(conf, queryState.getLineageState());
+      // FIXME: this driver instance is never closed
+      Driver driver = new Driver(newQueryState);
       int rc = driver.compile(cmd.toString(), false);
       if (rc != 0) {
         throw new SemanticException(ErrorMsg.NO_VALID_PARTN.getMsg());
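
The second FIXME above flags a real leak: the nested Driver is never closed. As
a suggestion rather than something this commit does, try-with-resources would
bound its lifetime (Driver is already used in try-with-resources elsewhere in
this patch, e.g. in Worker.java below), provided the compiled plan is consumed
inside the block:

    QueryState newQueryState = Driver.getNewQueryState(conf, queryState.getLineageState());
    try (Driver driver = new Driver(newQueryState)) {
      if (driver.compile(cmd.toString(), false) != 0) {
        throw new SemanticException(ErrorMsg.NO_VALID_PARTN.getMsg());
      }
      // ... consume driver.getPlan() here, while the driver is still open ...
    }
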
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
index dea46c0..a32af75 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
@@ -103,10 +103,9 @@ public class ReExecDriver implements IDriver {
     return executionIndex == 0;
   }
 
-  public ReExecDriver(QueryState queryState, String userName, QueryInfo queryInfo,
-      ArrayList<IReExecutionPlugin> plugins) {
+  public ReExecDriver(QueryState queryState, QueryInfo queryInfo, ArrayList<IReExecutionPlugin> plugins) {
     this.queryState = queryState;
-    coreDriver = new Driver(queryState, userName, queryInfo, null);
+    coreDriver = new Driver(queryState, queryInfo, null);
     coreDriver.getHookRunner().addSemanticAnalyzerHook(new HandleReOptimizationExplain());
     this.plugins = plugins;
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
index 5dd105f..48bdc97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/scheduled/ScheduledQueryExecutionService.java
@@ -118,7 +118,7 @@ public class ScheduledQueryExecutionService implements Closeable {
         info.setState(QueryState.EXECUTING);
         reportQueryProgress();
         try (
-          IDriver driver = DriverFactory.newDriver(DriverFactory.getNewQueryState(conf), q.getUser(), null)) {
+          IDriver driver = DriverFactory.newDriver(DriverFactory.getNewQueryState(conf), null)) {
           info.setExecutorQueryId(driver.getQueryState().getQueryId());
           driver.run(q.getQuery());
           info.setState(QueryState.FINISHED);
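
Note that q.getUser() is dropped from the newDriver() call: the scheduled
query's owner is no longer handed to the driver directly, so under this patch
the executing SessionState is what determines the user. A sketch of what that
implies for the executor thread (the session setup shown is an assumption, not
part of this hunk; CommandProcessorException handling elided):

    SessionState ss = new SessionState(conf, q.getUser());  // run as the query's owner
    SessionState.start(ss);
    try (IDriver driver = DriverFactory.newDriver(DriverFactory.getNewQueryState(conf), null)) {
      driver.run(q.getQuery());
    }
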
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index 3270175..749cdb6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -342,7 +342,7 @@ public class Worker extends RemoteCompactorThread implements MetaStoreThread {
         //todo: use DriverUtils.runOnDriver() here
         QueryState queryState = new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build();
         SessionState localSession = null;
-        try (Driver d = new Driver(queryState, userName)) {
+        try (Driver d = new Driver(queryState)) {
           if (SessionState.get() == null) {
             localSession = new SessionState(conf);
             SessionState.start(localSession);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index e95edbf..389f5cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -306,7 +306,7 @@ public class GenericUDTFGetSplits extends GenericUDTF {
     // So initialize the new Driver with a new TxnManager so that it does not use the
     // Session TxnManager that is already in use.
     HiveTxnManager txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
-    Driver driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null, null, txnManager);
+    Driver driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null, txnManager);
     DriverCleanup driverCleanup = new DriverCleanup(driver, txnManager, splitsAppId.toString());
     boolean needsCleanup = true;
     try {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 6e7b201..8421408 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -272,7 +272,7 @@ public class TestTxnCommands extends TxnCommandsBaseForTests {
         throw new RuntimeException(e);
       }
       QueryState qs = new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build();
-      try (Driver d = new Driver(qs, null)) {
+      try (Driver d = new Driver(qs)) {
         LOG.info("Ready to run the query: " + query);
         syncThreadStart(cdlIn, cdlOut);
         try {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index cbc72b4..c184ce5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -160,7 +160,7 @@ public class TestTxnCommands2 {
     }
     SessionState ss = SessionState.start(hiveConf);
     ss.applyAuthorizationPolicy();
-    d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
+    d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build());
     d.setMaxRows(10000);
     dropTables();
     runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES (" + tableProperties + ")");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands3.java b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands3.java
index 7b3ab28..908ceb4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands3.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands3.java
@@ -306,7 +306,7 @@ public class TestTxnCommands3 extends TxnCommandsBaseForTests {
     runStatementOnDriver("insert into T values(0,2)");//makes delta_1_1 in T1
     runStatementOnDriver("insert into T values(1,4)");//makes delta_2_2 in T2
 
-    Driver driver2 = new Driver(new QueryState.Builder().withHiveConf(hiveConf).build(), null);
+    Driver driver2 = new Driver(new QueryState.Builder().withHiveConf(hiveConf).build());
     driver2.setMaxRows(10000);
 
     HiveTxnManager txnMgr2 = TxnManagerFactory.getTxnManagerFactory().getTxnManager(hiveConf);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
index f01a07e..af14e62 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
@@ -114,7 +114,7 @@ public abstract class TxnCommandsBaseForTests {
     }
     SessionState ss = SessionState.start(hiveConf);
     ss.applyAuthorizationPolicy();
-    d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
+    d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build());
     d.setMaxRows(10000);
     dropTables();
     runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='true')");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestLocalHiveSparkClient.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestLocalHiveSparkClient.java
index 94991d3..3f89b90 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestLocalHiveSparkClient.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestLocalHiveSparkClient.java
@@ -91,7 +91,7 @@ public class TestLocalHiveSparkClient {
     try {
       driver = new Driver(new QueryState.Builder()
           .withGenerateNewQueryId(true)
-          .withHiveConf(conf).build(), null, null);
+          .withHiveConf(conf).build(), null);
 
       SparkSession sparkSession = SparkUtilities.getSparkSession(conf,
           SparkSessionManagerImpl.getInstance());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
index f9bbb28..23d860f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java
@@ -100,8 +100,8 @@ public class TestDbTxnManager2 {
   public void setUp() throws Exception {
     SessionState.start(conf);
     ctx = new Context(conf);
-    driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null);
-    driver2 = new Driver(new QueryState.Builder().withHiveConf(conf).build(), null);
+    driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build());
+    driver2 = new Driver(new QueryState.Builder().withHiveConf(conf).build());
     TxnDbUtil.cleanDb(conf);
     TxnDbUtil.prepDb(conf);
     SessionState ss = SessionState.get();
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index b87b670..468ce10 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -160,7 +160,7 @@ public class SQLOperation extends ExecuteStatementOperation {
   public void prepare(QueryState queryState) throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
-      driver = DriverFactory.newDriver(queryState, getParentSession().getUserName(), queryInfo);
+      driver = DriverFactory.newDriver(queryState, queryInfo);
 
       // Start the timer thread for cancelling the query when query timeout is reached
       // queryTimeout == 0 means no timeout
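
In HiveServer2 this is the main entry point, and it shows why the dropped
parameter was redundant: by the time prepare() runs, the parent HiveSession has
already started a SessionState carrying the same user the old code fetched via
getParentSession().getUserName(). Stated as an assumption about the session
plumbing rather than something this hunk proves:

    String sessionUser = SessionState.get().getUserName();  // what Driver now sees
    String hs2User = getParentSession().getUserName();      // what was passed before
    // the patch relies on these two resolving to the same user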