Posted to commits@hive.apache.org by kg...@apache.org on 2019/04/18 14:55:17 UTC

[hive] branch master updated: HIVE-21593: Break up DDLTask - extract Privilege related operations (Miklos Gergely via Zoltan Haindrich)

This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new bb71ce5  HIVE-21593: Break up DDLTask - extract Privilege related operations (Miklos Gergely via Zoltan Haindrich)
bb71ce5 is described below

commit bb71ce5f54b07f74db9ae3f2dffe54d316107d52
Author: Miklos Gergely <mg...@hortonworks.com>
AuthorDate: Thu Apr 18 16:50:41 2019 +0200

    HIVE-21593: Break up DDLTask - extract Privilege related operations (Miklos Gergely via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
---
 .../hive/ql/ddl/database/AlterDatabaseDesc.java    |   2 +-
 .../ql/ddl/database/ShowCreateDatabaseDesc.java    |   7 +-
 .../hive/ql/ddl/database/ShowDatabasesDesc.java    |   1 -
 .../hive/ql/ddl/function/DescFunctionDesc.java     |  10 +-
 .../hive/ql/ddl/function/ShowFunctionsDesc.java    |  10 +-
 .../ql/ddl/function/ShowFunctionsOperation.java    |   4 +-
 .../CreateRoleDesc.java}                           |  34 +--
 .../hive/ql/ddl/privilege/CreateRoleOperation.java |  44 +++
 .../privilege/DropRoleDesc.java}                   |  45 ++--
 .../hive/ql/ddl/privilege/DropRoleOperation.java   |  44 +++
 .../hive/ql/{plan => ddl/privilege}/GrantDesc.java | 106 ++------
 .../hive/ql/ddl/privilege/GrantOperation.java      |  63 +++++
 .../hive/ql/ddl/privilege/GrantRoleDesc.java       |  67 +++++
 .../hive/ql/ddl/privilege/GrantRoleOperation.java  |  59 ++++
 .../ql/{plan => ddl/privilege}/PrincipalDesc.java  |  27 +-
 .../ql/{plan => ddl/privilege}/PrivilegeDesc.java  |  39 +--
 .../privilege}/PrivilegeObjectDesc.java            |  52 ++--
 .../ql/{plan => ddl/privilege}/RevokeDesc.java     |  63 ++---
 .../hive/ql/ddl/privilege/RevokeOperation.java     |  62 +++++
 .../hive/ql/ddl/privilege/RevokeRoleDesc.java      |  67 +++++
 .../hive/ql/ddl/privilege/RevokeRoleOperation.java |  59 ++++
 .../hadoop/hive/ql/ddl/privilege/RoleUtils.java    |  75 ++++++
 .../SetRoleDesc.java}                              |  34 +--
 .../hive/ql/ddl/privilege/SetRoleOperation.java    |  44 +++
 .../ShowCurrentRoleDesc.java}                      |  25 +-
 .../ql/ddl/privilege/ShowCurrentRoleOperation.java |  49 ++++
 .../ql/{plan => ddl/privilege}/ShowGrantDesc.java  |  61 ++---
 .../hive/ql/ddl/privilege/ShowGrantOperation.java  | 103 +++++++
 .../ShowPrincipalsDesc.java}                       |  40 +--
 .../ql/ddl/privilege/ShowPrincipalsOperation.java  |  73 +++++
 .../ShowRoleGrantDesc.java}                        |  47 ++--
 .../ql/ddl/privilege/ShowRoleGrantOperation.java   |  72 +++++
 .../ShowRolesDesc.java}                            |  27 +-
 .../hive/ql/ddl/privilege/ShowRolesOperation.java  |  48 ++++
 .../hadoop/hive/ql/ddl/privilege/package-info.java |  20 ++
 .../hadoop/hive/ql/ddl/table/DescTableDesc.java    |  23 +-
 .../hive/ql/ddl/table/ShowCreateTableDesc.java     |   7 +-
 .../hive/ql/ddl/table/ShowTablePropertiesDesc.java |   7 +-
 .../hive/ql/ddl/table/ShowTableStatusDesc.java     |   7 +-
 .../hadoop/hive/ql/ddl/table/ShowTablesDesc.java   |  20 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java    | 298 ---------------------
 .../ql/exec/repl/bootstrap/load/LoadDatabase.java  |   2 +-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  53 ++--
 .../authorization/AuthorizationParseUtils.java     |   2 +-
 .../HiveAuthorizationTaskFactoryImpl.java          | 114 ++++----
 .../repl/load/message/AlterDatabaseHandler.java    |   2 +-
 .../repl/load/message/CreateDatabaseHandler.java   |   2 +-
 .../apache/hadoop/hive/ql/plan/AlterTableDesc.java |   1 +
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java    |  65 -----
 .../hadoop/hive/ql/plan/GrantRevokeRoleDDL.java    | 117 --------
 .../apache/hadoop/hive/ql/plan/RoleDDLDesc.java    | 159 -----------
 .../security/authorization/AuthorizationUtils.java |   6 +-
 .../DefaultHiveAuthorizationTranslator.java        |   6 +-
 .../hive/ql/security/authorization/Privilege.java  |   4 +
 .../ql/security/authorization/PrivilegeType.java   |   3 +
 .../plugin/HiveAuthorizationTranslator.java        |   6 +-
 .../parse/authorization/AuthorizationTestUtil.java |  25 +-
 .../ql/parse/authorization/PrivilegesTestBase.java |  21 +-
 .../TestHiveAuthorizationTaskFactory.java          | 168 +++++-------
 .../authorization_cannot_create_default_role.q.out |   2 +-
 .../authorization_caseinsensitivity.q.out          |   2 +-
 .../authorization_create_role_no_admin.q.out       |   2 +-
 .../authorization_drop_admin_role.q.out            |   2 +-
 .../authorization_drop_role_no_admin.q.out         |   2 +-
 .../clientnegative/authorization_fail_1.q.out      |   2 +-
 .../clientnegative/authorization_fail_8.q.out      |   2 +-
 .../clientnegative/authorization_grant_group.q.out |   2 +-
 .../authorization_grant_table_allpriv.q.out        |   2 +-
 .../authorization_grant_table_dup.q.out            |   2 +-
 .../authorization_grant_table_fail1.q.out          |   2 +-
 .../authorization_grant_table_fail_nogrant.q.out   |   2 +-
 .../authorization_invalid_priv_v2.q.out            |   2 +-
 .../authorization_priv_current_role_neg.q.out      |   2 +-
 .../authorization_public_create.q.out              |   2 +-
 .../clientnegative/authorization_public_drop.q.out |   2 +-
 .../authorization_revoke_table_fail1.q.out         |   2 +-
 .../authorization_revoke_table_fail2.q.out         |   2 +-
 .../clientnegative/authorization_role_case.q.out   |   2 +-
 .../authorization_role_cycles1.q.out               |   2 +-
 .../authorization_role_cycles2.q.out               |   2 +-
 .../clientnegative/authorization_role_grant.q.out  |   2 +-
 .../clientnegative/authorization_role_grant2.q.out |   2 +-
 .../authorization_role_grant_nosuchrole.q.out      |   2 +-
 .../authorization_role_grant_otherrole.q.out       |   2 +-
 .../authorization_role_grant_otheruser.q.out       |   2 +-
 .../authorization_set_role_neg1.q.out              |   2 +-
 .../authorization_set_role_neg2.q.out              |   2 +-
 .../authorization_show_grant_otherrole.q.out       |   2 +-
 .../authorization_show_grant_otheruser_all.q.out   |   2 +-
 ...uthorization_show_grant_otheruser_alltabs.q.out |   2 +-
 .../authorization_show_grant_otheruser_wtab.q.out  |   2 +-
 ...thorization_show_role_principals_no_admin.q.out |   2 +-
 .../authorization_show_roles_no_admin.q.out        |   2 +-
 .../authorization_table_grant_nosuchrole.q.out     |   2 +-
 .../clientnegative/authorize_grant_public.q.out    |   2 +-
 .../clientnegative/authorize_revoke_public.q.out   |   2 +-
 .../clientpositive/tez/explainanalyze_3.q.out      |  10 +
 .../results/clientpositive/tez/explainuser_3.q.out |  10 +
 98 files changed, 1406 insertions(+), 1389 deletions(-)
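
[Editorial sketch, not part of the original commit email] The diff below replaces the privilege-related branches of the monolithic DDLTask with per-command Desc/Operation pairs that self-register with DDLTask2 (see CreateRoleDesc/CreateRoleOperation further down). The following is a minimal, self-contained Java sketch of that registration-and-dispatch pattern; the DDLDesc/DDLOperation/DDLTask2 stand-ins, the lookup helper, and the main method are simplified illustrations with hypothetical signatures, not the actual Hive classes.

    import java.util.HashMap;
    import java.util.Map;

    // Simplified stand-ins for the real Hive types; signatures are illustrative only.
    interface DDLDesc {
    }

    abstract class DDLOperation {
        abstract int execute() throws Exception;
    }

    final class DDLTask2 {
        // Registry mapping each description class to the operation class that executes it.
        private static final Map<Class<? extends DDLDesc>, Class<? extends DDLOperation>> OPERATIONS =
            new HashMap<>();

        static void registerOperation(Class<? extends DDLDesc> descClass,
                                      Class<? extends DDLOperation> operationClass) {
            OPERATIONS.put(descClass, operationClass);
        }

        static Class<? extends DDLOperation> lookup(Class<? extends DDLDesc> descClass) {
            return OPERATIONS.get(descClass);
        }
    }

    // Each command contributes an immutable description that registers its operation...
    class CreateRoleDesc implements DDLDesc {
        static {
            DDLTask2.registerOperation(CreateRoleDesc.class, CreateRoleOperation.class);
        }

        private final String name;

        CreateRoleDesc(String name) {
            this.name = name;
        }

        String getName() {
            return name;
        }
    }

    // ...and an operation carrying the execution logic formerly inlined in DDLTask.
    class CreateRoleOperation extends DDLOperation {
        private final CreateRoleDesc desc;

        CreateRoleOperation(CreateRoleDesc desc) {
            this.desc = desc;
        }

        @Override
        int execute() {
            // In Hive this delegates to the session's HiveAuthorizer; here we only report.
            System.out.println("CREATE ROLE " + desc.getName());
            return 0;
        }
    }

    public class DescOperationPatternSketch {
        public static void main(String[] args) throws Exception {
            CreateRoleDesc desc = new CreateRoleDesc("analyst");
            // The dispatcher resolves the operation class from the registry and runs it.
            Class<? extends DDLOperation> opClass = DDLTask2.lookup(CreateRoleDesc.class);
            DDLOperation op = opClass.getDeclaredConstructor(CreateRoleDesc.class).newInstance(desc);
            System.out.println("exit code: " + op.execute());
        }
    }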

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
index 547b351..46eb092 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
@@ -23,10 +23,10 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 
 /**
  * DDL task description for ALTER DATABASE commands.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
index 29dc266..848bfab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowCreateDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "createdb_stmt#string";
+
   static {
     DDLTask2.registerOperation(ShowCreateDatabaseDesc.class, ShowCreateDatabaseOperation.class);
   }
@@ -40,11 +42,6 @@ public class ShowCreateDatabaseDesc implements DDLDesc, Serializable {
   private final String resFile;
   private final String dbName;
 
-  /**
-   * Thrift ddl for the result of showcreatedatabase.
-   */
-  public static final String SCHEMA = "createdb_stmt#string";
-
   public ShowCreateDatabaseDesc(String dbName, String resFile) {
     this.dbName = dbName;
     this.resFile = resFile;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
index 4814fd3..e8f4e44 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowDatabasesDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  /** Thrift ddl for the result of show databases. */
   public static final String SHOW_DATABASES_SCHEMA = "database_name#string";
 
   static {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
index 7f1aa0c..b84c630 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
@@ -33,18 +33,12 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class DescFunctionDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "tab_name#string";
+
   static {
     DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
-  }
-
   private final String resFile;
   private final String name;
   private final boolean isExtended;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
index 2affa32..79074e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
@@ -33,18 +33,12 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowFunctionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "tab_name#string";
+
   static {
     DDLTask2.registerOperation(ShowFunctionsDesc.class, ShowFunctionsOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
-  }
-
   private final String resFile;
   private final String pattern;
   private final boolean isLikePattern;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
index d76312d..d23899c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
@@ -54,14 +54,14 @@ public class ShowFunctionsOperation extends DDLOperation {
   private Set<String> fetchFunctions() {
     Set<String> funcs = null;
     if (desc.getPattern() != null) {
-      LOG.debug("pattern: {}", desc.getPattern());
       if (desc.getIsLikePattern()) {
         funcs = FunctionRegistry.getFunctionNamesByLikePattern(desc.getPattern());
       } else {
         context.getConsole().printInfo("SHOW FUNCTIONS is deprecated, please use SHOW FUNCTIONS LIKE instead.");
         funcs = FunctionRegistry.getFunctionNames(desc.getPattern());
       }
-      LOG.info("Found {} function(s) matching the SHOW FUNCTIONS statement.", funcs.size());
+      LOG.info("Found {} function(s) using pattern {} matching the SHOW FUNCTIONS statement.", funcs.size(),
+          desc.getPattern());
     } else {
       funcs = FunctionRegistry.getFunctionNames();
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
similarity index 57%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
index 7f1aa0c..9641682 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
@@ -16,56 +16,34 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for DESC FUNCTION commands.
+ * DDL task description for CREATE ROLE commands.
  */
-@Explain(displayName = "Describe Function", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class DescFunctionDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Create Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
   static {
-    DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
+    DDLTask2.registerOperation(CreateRoleDesc.class, CreateRoleOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
-  }
-
-  private final String resFile;
   private final String name;
-  private final boolean isExtended;
 
-  public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
-    this.resFile = resFile.toString();
+  public CreateRoleDesc(String name) {
     this.name = name;
-    this.isExtended = isExtended;
-  }
-
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
-    return resFile;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
-
-  public boolean isExtended() {
-    return isExtended;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java
new file mode 100644
index 0000000..6782b02
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+
+/**
+ * Operation process of creating a role.
+ */
+public class CreateRoleOperation extends DDLOperation {
+  private final CreateRoleDesc desc;
+
+  public CreateRoleOperation(DDLOperationContext context, CreateRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    authorizer.createRole(desc.getName(), null);
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
similarity index 57%
copy from ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
index 1d82b19..b8dcaac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
@@ -16,49 +16,34 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
-@Explain(displayName = "Principal", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class PrincipalDesc implements Serializable, Cloneable {
-
+/**
+ * DDL task description for DROP ROLE commands.
+ */
+@Explain(displayName = "Drop Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DropRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String name;
+  static {
+    DDLTask2.registerOperation(DropRoleDesc.class, DropRoleOperation.class);
+  }
 
-  private PrincipalType type;
+  private final String name;
 
-  public PrincipalDesc(String name, PrincipalType type) {
-    super();
+  public DropRoleDesc(String name) {
     this.name = name;
-    this.type = type;
   }
 
-  public PrincipalDesc() {
-    super();
-  }
-
-  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  @Explain(displayName="type", explainLevels = { Level.EXTENDED })
-  public PrincipalType getType() {
-    return type;
-  }
-
-  public void setType(PrincipalType type) {
-    this.type = type;
-  }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java
new file mode 100644
index 0000000..e8b55ec
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+
+/**
+ * Operation process of dropping a role.
+ */
+public class DropRoleOperation extends DDLOperation {
+  private final DropRoleDesc desc;
+
+  public DropRoleOperation(DDLOperationContext context, DropRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    authorizer.dropRole(desc.getName());
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
similarity index 54%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
index b5f9a69..0cd7f1e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
@@ -16,118 +16,70 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
+/**
+ * DDL task description for GRANT commands.
+ */
 @Explain(displayName = "Grant", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class GrantDesc extends DDLDesc implements Serializable, Cloneable {
-
+public class GrantDesc implements DDLDesc, Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
 
-  private List<PrivilegeDesc> privileges;
-
-  private List<PrincipalDesc> principals;
-
-  private boolean grantOption;
-  
-  private String grantor;
-  
-  private PrincipalType grantorType;
+  static {
+    DDLTask2.registerOperation(GrantDesc.class, GrantOperation.class);
+  }
 
-  private PrivilegeObjectDesc privilegeSubjectDesc;
+  private final PrivilegeObjectDesc privilegeSubject;
+  private final List<PrivilegeDesc> privileges;
+  private final List<PrincipalDesc> principals;
+  private final String grantor;
+  private final PrincipalType grantorType;
+  private final boolean grantOption;
 
-  public GrantDesc(PrivilegeObjectDesc privilegeSubject,
-      List<PrivilegeDesc> privilegeDesc, List<PrincipalDesc> principalDesc,
+  public GrantDesc(PrivilegeObjectDesc privilegeSubject, List<PrivilegeDesc> privileges, List<PrincipalDesc> principals,
       String grantor, PrincipalType grantorType, boolean grantOption) {
-    super();
-    this.privilegeSubjectDesc = privilegeSubject;
-    this.privileges = privilegeDesc;
-    this.principals = principalDesc;
+    this.privilegeSubject = privilegeSubject;
+    this.privileges = privileges;
+    this.principals = principals;
     this.grantor = grantor;
     this.grantorType = grantorType;
     this.grantOption = grantOption;
   }
 
-  /**
-   * @return privileges
-   */
+  @Explain(skipHeader = true, explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public PrivilegeObjectDesc getPrivilegeSubject() {
+    return privilegeSubject;
+  }
+
   @Explain(displayName = "Privileges", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<PrivilegeDesc> getPrivileges() {
     return privileges;
   }
 
-  /**
-   * @param privileges
-   */
-  public void setPrivileges(List<PrivilegeDesc> privileges) {
-    this.privileges = privileges;
-  }
-
-  /**
-   * @return principals 
-   */
   @Explain(displayName = "Principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<PrincipalDesc> getPrincipals() {
     return principals;
   }
 
-  /**
-   * @param principals
-   */
-  public void setPrincipals(List<PrincipalDesc> principals) {
-    this.principals = principals;
-  }
-
-  /**
-   * @return grant option
-   */
-  @Explain(displayName = "grant option", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public boolean isGrantOption() {
-    return grantOption;
-  }
-
-  /**
-   * @param grantOption
-   */
-  public void setGrantOption(boolean grantOption) {
-    this.grantOption = grantOption;
-  }
-
-  /**
-   * @return privilege subject
-   */
-  @Explain(displayName="privilege subject", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public PrivilegeObjectDesc getPrivilegeSubjectDesc() {
-    return privilegeSubjectDesc;
-  }
-
-  /**
-   * @param privilegeSubjectDesc
-   */
-  public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) {
-    this.privilegeSubjectDesc = privilegeSubjectDesc;
-  }
-
   public String getGrantor() {
     return grantor;
   }
 
-  public void setGrantor(String grantor) {
-    this.grantor = grantor;
-  }
-
   public PrincipalType getGrantorType() {
     return grantorType;
   }
 
-  public void setGrantorType(PrincipalType grantorType) {
-    this.grantorType = grantorType;
+  @Explain(displayName = "grant option", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isGrantOption() {
+    return grantOption;
   }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java
new file mode 100644
index 0000000..633ac43
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+/**
+ * Operation process of granting.
+ */
+public class GrantOperation extends DDLOperation {
+  private final GrantDesc desc;
+
+  public GrantOperation(DDLOperationContext context, GrantDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+
+    //Convert to object types used by the authorization plugin interface
+    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(desc.getPrincipals(),
+        RoleUtils.getAuthorizationTranslator(authorizer));
+    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(desc.getPrivileges(),
+        RoleUtils.getAuthorizationTranslator(authorizer));
+    HivePrivilegeObject hivePrivilegeObject =
+        RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getPrivilegeSubject());
+    HivePrincipal grantorPrincipal = new HivePrincipal(desc.getGrantor(),
+        AuthorizationUtils.getHivePrincipalType(desc.getGrantorType()));
+
+    authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivilegeObject, grantorPrincipal,
+        desc.isGrantOption());
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java
new file mode 100644
index 0000000..e27931a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for GRANT ROLE commands.
+ */
+@Explain(displayName="Grant roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class GrantRoleDesc implements DDLDesc {
+
+  static {
+    DDLTask2.registerOperation(GrantRoleDesc.class, GrantRoleOperation.class);
+  }
+
+  private final List<String> roles;
+  private final List<PrincipalDesc> principals;
+  private final String grantor;
+  private final boolean grantOption;
+
+  public GrantRoleDesc(List<String> roles, List<PrincipalDesc> principals, String grantor, boolean grantOption) {
+    this.principals = principals;
+    this.roles = roles;
+    this.grantor = grantor;
+    this.grantOption = grantOption;
+  }
+
+  @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public List<PrincipalDesc> getPrincipals() {
+    return principals;
+  }
+
+  @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public List<String> getRoles() {
+    return roles;
+  }
+
+  public String getGrantor() {
+    return grantor;
+  }
+
+  public boolean isGrantOption() {
+    return grantOption;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java
new file mode 100644
index 0000000..19abe27
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+
+/**
+ * Operation process of granting a role.
+ */
+public class GrantRoleOperation extends DDLOperation {
+  private final GrantRoleDesc desc;
+
+  public GrantRoleOperation(DDLOperationContext context, GrantRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+
+    List<HivePrincipal> principals =
+        AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), RoleUtils.getAuthorizationTranslator(authorizer));
+    HivePrincipal grantorPrincipal = null;
+    if (desc.getGrantor() != null) {
+      grantorPrincipal =
+          new HivePrincipal(desc.getGrantor(), AuthorizationUtils.getHivePrincipalType(PrincipalType.USER));
+    }
+
+    authorizer.grantRole(principals, desc.getRoles(), desc.isGrantOption(), grantorPrincipal);
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java
similarity index 84%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java
index 1d82b19..9c7b095 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java
@@ -16,49 +16,36 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
+/**
+ * Represents a database principal.
+ */
 @Explain(displayName = "Principal", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class PrincipalDesc implements Serializable, Cloneable {
-
   private static final long serialVersionUID = 1L;
 
-  private String name;
-
-  private PrincipalType type;
+  private final String name;
+  private final PrincipalType type;
 
   public PrincipalDesc(String name, PrincipalType type) {
-    super();
     this.name = name;
     this.type = type;
   }
 
-  public PrincipalDesc() {
-    super();
-  }
-
   @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
 
-  public void setName(String name) {
-    this.name = name;
-  }
-
   @Explain(displayName="type", explainLevels = { Level.EXTENDED })
   public PrincipalType getType() {
     return type;
   }
-
-  public void setType(PrincipalType type) {
-    this.type = type;
-  }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java
similarity index 76%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java
index 1cb328a..0b308cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java
@@ -16,22 +16,24 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
+/**
+ * Represents a database privilege.
+ */
 @Explain(displayName = "Privilege", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class PrivilegeDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
-  
-  private Privilege privilege;
-  
-  private List<String> columns;
+
+  private final Privilege privilege;
+  private final List<String> columns;
 
   public PrivilegeDesc(Privilege privilege, List<String> columns) {
     super();
@@ -39,38 +41,13 @@ public class PrivilegeDesc implements Serializable, Cloneable {
     this.columns = columns;
   }
 
-  public PrivilegeDesc() {
-    super();
-  }
-
-  /**
-   * @return privilege definition
-   */
   @Explain(displayName = "privilege", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Privilege getPrivilege() {
     return privilege;
   }
 
-  /**
-   * @param privilege
-   */
-  public void setPrivilege(Privilege privilege) {
-    this.privilege = privilege;
-  }
-
-  /**
-   * @return columns on which the given privilege take affect.
-   */
   @Explain(displayName = "columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getColumns() {
     return columns;
   }
-
-  /**
-   * @param columns
-   */
-  public void setColumns(List<String> columns) {
-    this.columns = columns;
-  }
-  
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java
similarity index 66%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java
index f18a51b..785cddd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java
@@ -16,34 +16,32 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
-import java.util.HashMap;
+import java.io.Serializable;
 import java.util.List;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
+import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
+/**
+ * Represents a privilege object.
+ */
 @Explain(displayName="privilege subject", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class PrivilegeObjectDesc {
-
-  //default type is table
-  private boolean table = true;
-
-  private String object;
-
-  private HashMap<String, String> partSpec;
+public class PrivilegeObjectDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
 
-  private List<String> columns;
+  private final boolean table;
+  private final String object;
+  private final Map<String, String> partSpec;
+  private final List<String> columns;
 
-  public PrivilegeObjectDesc(boolean isTable, String object,
-      HashMap<String, String> partSpec) {
-    super();
+  public PrivilegeObjectDesc(boolean isTable, String object, Map<String, String> partSpec, List<String> columns) {
     this.table = isTable;
     this.object = object;
     this.partSpec = partSpec;
-  }
-
-  public PrivilegeObjectDesc() {
+    this.columns = columns;
   }
 
   @Explain(displayName="is table")
@@ -51,33 +49,17 @@ public class PrivilegeObjectDesc {
     return table;
   }
 
-  public void setTable(boolean isTable) {
-    this.table = isTable;
-  }
-
   @Explain(displayName="object", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getObject() {
     return object;
   }
 
-  public void setObject(String object) {
-    this.object = object;
-  }
-
   @Explain(displayName="partition spec", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public HashMap<String, String> getPartSpec() {
+  public Map<String, String> getPartSpec() {
     return partSpec;
   }
 
-  public void setPartSpec(HashMap<String, String> partSpec) {
-    this.partSpec = partSpec;
-  }
-
   public List<String> getColumns() {
     return columns;
   }
-
-  public void setColumns(List<String> columns) {
-    this.columns = columns;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
similarity index 52%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
index 0e0db1f..dd873c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
@@ -16,73 +16,56 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 import java.util.List;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
+/**
+ * DDL task description for REVOKE commands.
+ */
 @Explain(displayName="Revoke", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class RevokeDesc extends DDLDesc implements Serializable, Cloneable {
-
+public class RevokeDesc implements DDLDesc, Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
 
-  private List<PrivilegeDesc> privileges;
-
-  private List<PrincipalDesc> principals;
-
-  private PrivilegeObjectDesc privilegeSubjectDesc;
-
-  private boolean grantOption;
-
-  public RevokeDesc(){
+  static {
+    DDLTask2.registerOperation(RevokeDesc.class, RevokeOperation.class);
   }
 
-  public RevokeDesc(List<PrivilegeDesc> privileges,
-      List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc) {
-    this(privileges, principals, privilegeSubjectDesc, false);
-  }
+  private final List<PrivilegeDesc> privileges;
+  private final List<PrincipalDesc> principals;
+  private final PrivilegeObjectDesc privilegeSubject;
+  private final boolean grantOption;
 
-  public RevokeDesc(List<PrivilegeDesc> privileges,
-        List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc, boolean grantOption) {
-    super();
+  public RevokeDesc(List<PrivilegeDesc> privileges, List<PrincipalDesc> principals,
+      PrivilegeObjectDesc privilegeSubject, boolean grantOption) {
     this.privileges = privileges;
     this.principals = principals;
-    this.privilegeSubjectDesc = privilegeSubjectDesc;
+    this.privilegeSubject = privilegeSubject;
     this.grantOption = grantOption;
   }
 
+  @Explain(displayName = "Privileges", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<PrivilegeDesc> getPrivileges() {
     return privileges;
   }
 
-  public void setPrivileges(List<PrivilegeDesc> privileges) {
-    this.privileges = privileges;
-  }
-
+  @Explain(displayName = "Principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<PrincipalDesc> getPrincipals() {
     return principals;
   }
 
-  public void setPrincipals(List<PrincipalDesc> principals) {
-    this.principals = principals;
-  }
-
-  public PrivilegeObjectDesc getPrivilegeSubjectDesc() {
-    return privilegeSubjectDesc;
-  }
-
-  public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) {
-    this.privilegeSubjectDesc = privilegeSubjectDesc;
+  @Explain(skipHeader = true, explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public PrivilegeObjectDesc getPrivilegeSubject() {
+    return privilegeSubject;
   }
 
   public boolean isGrantOption() {
     return grantOption;
   }
-
-  public void setGrantOption(boolean grantOption) {
-    this.grantOption = grantOption;
-  }
-  
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java
new file mode 100644
index 0000000..bf4e01a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+/**
+ * Operation process of revoking.
+ */
+public class RevokeOperation extends DDLOperation {
+  private final RevokeDesc desc;
+
+  public RevokeOperation(DDLOperationContext context, RevokeDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+
+    //Convert to object types used by the authorization plugin interface
+    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(desc.getPrincipals(),
+        RoleUtils.getAuthorizationTranslator(authorizer));
+    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(desc.getPrivileges(),
+        RoleUtils.getAuthorizationTranslator(authorizer));
+    HivePrivilegeObject hivePrivilegeObject =
+        RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getPrivilegeSubject());
+    HivePrincipal grantorPrincipal = new HivePrincipal(null, null);
+
+    authorizer.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivilegeObject, grantorPrincipal,
+        desc.isGrantOption());
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java
new file mode 100644
index 0000000..05507ec
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for REVOKE ROLE commands.
+ */
+@Explain(displayName="Revoke roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class RevokeRoleDesc implements DDLDesc {
+
+  static {
+    DDLTask2.registerOperation(RevokeRoleDesc.class, RevokeRoleOperation.class);
+  }
+
+  private final List<String> roles;
+  private final List<PrincipalDesc> principals;
+  private final String grantor;
+  private final boolean grantOption;
+
+  public RevokeRoleDesc(List<String> roles, List<PrincipalDesc> principals, String grantor, boolean grantOption) {
+    this.principals = principals;
+    this.roles = roles;
+    this.grantor = grantor;
+    this.grantOption = grantOption;
+  }
+
+  @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public List<PrincipalDesc> getPrincipals() {
+    return principals;
+  }
+
+  @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public List<String> getRoles() {
+    return roles;
+  }
+
+  public String getGrantor() {
+    return grantor;
+  }
+
+  public boolean isGrantOption() {
+    return grantOption;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java
new file mode 100644
index 0000000..0b3b276
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+
+/**
+ * Operation process of revoking a role.
+ */
+public class RevokeRoleOperation extends DDLOperation {
+  private final RevokeRoleDesc desc;
+
+  public RevokeRoleOperation(DDLOperationContext context, RevokeRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+
+    List<HivePrincipal> principals =
+        AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), RoleUtils.getAuthorizationTranslator(authorizer));
+    HivePrincipal grantorPrincipal = null;
+    if (desc.getGrantor() != null) {
+      grantorPrincipal =
+          new HivePrincipal(desc.getGrantor(), AuthorizationUtils.getHivePrincipalType(PrincipalType.USER));
+    }
+
+    authorizer.revokeRole(principals, desc.getRoles(), desc.isGrantOption(), grantorPrincipal);
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java
new file mode 100644
index 0000000..cfbc4cf
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Common utilities for Role related ddl operations.
+ */
+final class RoleUtils {
+  private RoleUtils() {
+    throw new UnsupportedOperationException("RoleUtils should not be instantiated");
+  }
+
+  static HiveAuthorizer getSessionAuthorizer(HiveConf conf) {
+    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    if (authorizer == null) {
+      authorizer = new HiveV1Authorizer(conf);
+    }
+
+    return authorizer;
+  }
+
+  static void writeListToFileAfterSort(List<String> entries, String resFile, DDLOperationContext context)
+      throws IOException {
+    Collections.sort(entries);
+
+    StringBuilder sb = new StringBuilder();
+    for (String entry : entries) {
+      DDLUtils.appendNonNull(sb, entry, true);
+    }
+
+    DDLUtils.writeToFile(sb.toString(), resFile, context);
+  }
+
+  private static final HiveAuthorizationTranslator DEFAULT_AUTHORIZATION_TRANSLATOR =
+      new DefaultHiveAuthorizationTranslator();
+
+  static HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer)
+      throws HiveAuthzPluginException {
+    if (authorizer.getHiveAuthorizationTranslator() == null) {
+      return DEFAULT_AUTHORIZATION_TRANSLATOR;
+    } else {
+      return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator();
+    }
+  }
+}
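
RoleUtils gathers the two bits of plumbing the privilege operations share: resolving the session's HiveAuthorizer (falling back to the V1 authorizer when no V2 plugin is configured) and emitting a sorted, one-entry-per-row result file. A minimal usage sketch, assuming an operation that only needs to list names (the output path below is invented for illustration; a real operation takes it from its desc):

    // Sketch only; "/tmp/role_names.out" is an illustrative path.
    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
    List<String> names = authorizer.getCurrentRoleNames();
    RoleUtils.writeListToFileAfterSort(names, "/tmp/role_names.out", context);

Because the helpers are package-private, they stay confined to org.apache.hadoop.hive.ql.ddl.privilege rather than re-growing the catch-all utility surface that DDLTask used to carry.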
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java
similarity index 57%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java
index 7f1aa0c..e3e1bb1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java
@@ -16,56 +16,34 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for DESC FUNCTION commands.
+ * DDL task description for SET ROLE commands.
  */
-@Explain(displayName = "Describe Function", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class DescFunctionDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Set Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class SetRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
   static {
-    DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
+    DDLTask2.registerOperation(SetRoleDesc.class, SetRoleOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
-  }
-
-  private final String resFile;
   private final String name;
-  private final boolean isExtended;
 
-  public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
-    this.resFile = resFile.toString();
+  public SetRoleDesc(String name) {
     this.name = name;
-    this.isExtended = isExtended;
-  }
-
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
-    return resFile;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
-
-  public boolean isExtended() {
-    return isExtended;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java
new file mode 100644
index 0000000..d119fe4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+
+/**
+ * Operation process of setting a role.
+ */
+public class SetRoleOperation extends DDLOperation {
+  private final SetRoleDesc desc;
+
+  public SetRoleOperation(DDLOperationContext context, SetRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    authorizer.setCurrentRole(desc.getName());
+    return 0;
+  }
+}
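
SetRoleDesc and SetRoleOperation are the smallest example of the wiring used throughout this change: the desc registers its operation class in a static initializer, and DDLTask2 later instantiates that class with the shared DDLOperationContext plus the desc and calls execute(). A hypothetical pair following the same shape (the Frob* names are invented purely to show the pattern, they are not part of this codebase):

    @Explain(displayName = "Frob Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
    public class FrobRoleDesc implements DDLDesc, Serializable {
      private static final long serialVersionUID = 1L;

      static {
        // desc class -> operation class lookup used by DDLTask2
        DDLTask2.registerOperation(FrobRoleDesc.class, FrobRoleOperation.class);
      }

      private final String name;

      public FrobRoleDesc(String name) {
        this.name = name;
      }

      @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
      public String getName() {
        return name;
      }
    }

    public class FrobRoleOperation extends DDLOperation {
      private final FrobRoleDesc desc;

      public FrobRoleOperation(DDLOperationContext context, FrobRoleDesc desc) {
        super(context);
        this.desc = desc;
      }

      @Override
      public int execute() throws HiveException {
        // Do the real work here, typically via RoleUtils.getSessionAuthorizer(context.getConf()).
        return 0;
      }
    }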
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
similarity index 61%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
index 8fa1ef1..37f049e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
@@ -26,36 +26,23 @@ import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for SHOW CREATE TABLE commands.
+ * DDL task description for SHOW CURRENT ROLE commands.
  */
-@Explain(displayName = "Show Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class ShowCreateTableDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Show Current Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowCurrentRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
   static {
-    DDLTask2.registerOperation(ShowCreateTableDesc.class, ShowCreateTableOperation.class);
+    DDLTask2.registerOperation(ShowCurrentRoleDesc.class, ShowCurrentRoleOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of showcreatetable.
-   */
-  public static final String SCHEMA = "createtab_stmt#string";
-
   private final String resFile;
-  private final String tableName;
 
-  public ShowCreateTableDesc(String tableName, String resFile) {
-    this.tableName = tableName;
+  public ShowCurrentRoleDesc(String resFile) {
     this.resFile = resFile;
   }
 
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
-
-  @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTableName() {
-    return tableName;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java
new file mode 100644
index 0000000..9738ddb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+
+/**
+ * Operation process of showing the current role.
+ */
+public class ShowCurrentRoleOperation extends DDLOperation {
+  private final ShowCurrentRoleDesc desc;
+
+  public ShowCurrentRoleOperation(DDLOperationContext context, ShowCurrentRoleDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    List<String> roleNames = authorizer.getCurrentRoleNames();
+    RoleUtils.writeListToFileAfterSort(roleNames, desc.getResFile(), context);
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
similarity index 58%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
index 23d786f..c5c8fe4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
@@ -15,64 +15,47 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.privilege;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
-@Explain(displayName="show grant desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class ShowGrantDesc {
-  
-  private PrincipalDesc principalDesc;
-
-  private PrivilegeObjectDesc hiveObj;
-  
-  private String resFile;
-
-  /**
-   * thrift ddl for the result of show grant.
-   */
-  private static final String tabularSchema =
+/**
+ * DDL task description for SHOW GRANT commands.
+ */
+@Explain(displayName="Show grant desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowGrantDesc implements DDLDesc {
+  public static final String SCHEMA =
       "database,table,partition,column,principal_name,principal_type,privilege," +
       "grant_option,grant_time,grantor#" +
       "string:string:string:string:string:string:string:boolean:bigint:string";
 
-  public ShowGrantDesc(){
-  }
-  
-  public ShowGrantDesc(String resFile, PrincipalDesc principalDesc,
-      PrivilegeObjectDesc subjectObj) {
-    this.resFile = resFile;
-    this.principalDesc = principalDesc;
-    this.hiveObj = subjectObj;
+  static {
+    DDLTask2.registerOperation(ShowGrantDesc.class, ShowGrantOperation.class);
   }
 
-  public static String getSchema() {
-    return tabularSchema;
+  private final String resFile;
+  private final PrincipalDesc principal;
+  private final PrivilegeObjectDesc hiveObj;
+
+  public ShowGrantDesc(String resFile, PrincipalDesc principal, PrivilegeObjectDesc hiveObj) {
+    this.resFile = resFile;
+    this.principal = principal;
+    this.hiveObj = hiveObj;
   }
 
   @Explain(displayName="principal desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public PrincipalDesc getPrincipalDesc() {
-    return principalDesc;
-  }
-
-  public void setPrincipalDesc(PrincipalDesc principalDesc) {
-    this.principalDesc = principalDesc;
+    return principal;
   }
 
-  @Explain(displayName="object", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(skipHeader = true,  explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public PrivilegeObjectDesc getHiveObj() {
     return hiveObj;
   }
 
-  public void setHiveObj(PrivilegeObjectDesc subjectObj) {
-    this.hiveObj = subjectObj;
-  }
-  
   public String getResFile() {
     return resFile;
   }
-
-  public void setResFile(String resFile) {
-    this.resFile = resFile;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java
new file mode 100644
index 0000000..50b4180
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+/**
+ * Operation process of showing a grant.
+ */
+public class ShowGrantOperation extends DDLOperation {
+  private final ShowGrantDesc desc;
+
+  public ShowGrantOperation(DDLOperationContext context, ShowGrantDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    try {
+      List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
+          RoleUtils.getAuthorizationTranslator(authorizer).getHivePrincipal(desc.getPrincipalDesc()),
+          RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getHiveObj()));
+      boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
+      DDLUtils.writeToFile(writeGrantInfo(privInfos, testMode), desc.getResFile(), context);
+    } catch (IOException e) {
+      throw new HiveException("Error in show grant statement", e);
+    }
+    return 0;
+  }
+
+  private String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) {
+    if (CollectionUtils.isEmpty(privileges)) {
+      return "";
+    }
+
+    //sort the list to get sorted (deterministic) output (for ease of testing)
+    Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() {
+      @Override
+      public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) {
+        int compare = o1.getObject().compareTo(o2.getObject());
+        if (compare == 0) {
+          compare = o1.getPrincipal().compareTo(o2.getPrincipal());
+        }
+        if (compare == 0) {
+          compare = o1.getPrivilege().compareTo(o2.getPrivilege());
+        }
+        return compare;
+      }
+    });
+
+    StringBuilder builder = new StringBuilder();
+    for (HivePrivilegeInfo privilege : privileges) {
+      HivePrincipal principal = privilege.getPrincipal();
+      HivePrivilegeObject resource = privilege.getObject();
+      HivePrincipal grantor = privilege.getGrantorPrincipal();
+
+      DDLUtils.appendNonNull(builder, resource.getDbname(), true);
+      DDLUtils.appendNonNull(builder, resource.getObjectName());
+      DDLUtils.appendNonNull(builder, resource.getPartKeys());
+      DDLUtils.appendNonNull(builder, resource.getColumns());
+      DDLUtils.appendNonNull(builder, principal.getName());
+      DDLUtils.appendNonNull(builder, principal.getType());
+      DDLUtils.appendNonNull(builder, privilege.getPrivilege().getName());
+      DDLUtils.appendNonNull(builder, privilege.isGrantOption());
+      DDLUtils.appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L);
+      DDLUtils.appendNonNull(builder, grantor.getName());
+    }
+    return builder.toString();
+  }
+}
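
The SCHEMA constants that move into the desc classes keep the existing thrift-DDL convention: comma-separated column names, then '#', then colon-separated column types, one type per column. A small sketch of how such a string decomposes, using the SHOW ROLE GRANT schema defined further down; this is purely illustrative of the format, not of how Hive consumes it:

    String schema = "role,grant_option,grant_time,grantor#string:boolean:bigint:string";
    String[] parts   = schema.split("#");
    String[] columns = parts[0].split(",");   // role, grant_option, grant_time, grantor
    String[] types   = parts[1].split(":");   // string, boolean, bigint, string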
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
similarity index 61%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
index 7f1aa0c..e1392c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
@@ -16,48 +16,36 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for DESC FUNCTION commands.
+ * DDL task description for SHOW PRINCIPALS commands.
  */
-@Explain(displayName = "Describe Function", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class DescFunctionDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Show Principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowPrincipalsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
-  }
+  public static final String SCHEMA =
+      "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" +
+      "string:string:boolean:string:string:bigint";
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
+  static {
+    DDLTask2.registerOperation(ShowPrincipalsDesc.class, ShowPrincipalsOperation.class);
   }
 
-  private final String resFile;
   private final String name;
-  private final boolean isExtended;
+  private final String resFile;
 
-  public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
-    this.resFile = resFile.toString();
+  public ShowPrincipalsDesc(String name, String resFile) {
     this.name = name;
-    this.isExtended = isExtended;
-  }
-
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
-    return resFile;
+    this.resFile = resFile;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -65,7 +53,7 @@ public class DescFunctionDesc implements DDLDesc, Serializable {
     return name;
   }
 
-  public boolean isExtended() {
-    return isExtended;
+  public String getResFile() {
+    return resFile;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java
new file mode 100644
index 0000000..392142b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+
+/**
+ * Operation process of showing the principals.
+ */
+public class ShowPrincipalsOperation extends DDLOperation {
+  private final ShowPrincipalsDesc desc;
+
+  public ShowPrincipalsOperation(DDLOperationContext context, ShowPrincipalsDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
+    List<HiveRoleGrant> roleGrants = authorizer.getPrincipalGrantInfoForRole(desc.getName());
+    DDLUtils.writeToFile(writeHiveRoleGrantInfo(roleGrants, testMode), desc.getResFile(), context);
+
+    return 0;
+  }
+
+  private String writeHiveRoleGrantInfo(List<HiveRoleGrant> roleGrants, boolean testMode) {
+    if (roleGrants == null || roleGrants.isEmpty()) {
+      return "";
+    }
+    StringBuilder builder = new StringBuilder();
+    // sort the list to get sorted (deterministic) output (for ease of testing)
+    Collections.sort(roleGrants);
+    for (HiveRoleGrant roleGrant : roleGrants) {
+      // schema: principal_name,principal_type,grant_option,grantor,grantor_type,grant_time
+      DDLUtils.appendNonNull(builder, roleGrant.getPrincipalName(), true);
+      DDLUtils.appendNonNull(builder, roleGrant.getPrincipalType());
+      DDLUtils.appendNonNull(builder, roleGrant.isGrantOption());
+      DDLUtils.appendNonNull(builder, roleGrant.getGrantor());
+      DDLUtils.appendNonNull(builder, roleGrant.getGrantorType());
+      DDLUtils.appendNonNull(builder, testMode ? -1 : roleGrant.getGrantTime() * 1000L);
+    }
+    return builder.toString();
+  }
+}
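
The row building in these operations relies on DDLUtils.appendNonNull, which behaves like the private appendNonNull being removed from DDLTask at the end of this patch: passing true for the first column starts a new row (emitting the row terminator first unless the buffer is still empty), every other column is preceded by the field separator, and null values are written as empty fields. A standalone restatement of that logic, with tab and newline assumed as separator and terminator for the sake of illustration:

    static StringBuilder appendNonNull(StringBuilder builder, Object value, boolean firstColumn) {
      if (!firstColumn) {
        builder.append('\t');            // field separator before each non-first column
      } else if (builder.length() > 0) {
        builder.append('\n');            // row terminator before each row except the first
      }
      if (value != null) {
        builder.append(value);           // null values become empty fields
      }
      return builder;
    }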
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
similarity index 61%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
index 7f1aa0c..3b713a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
@@ -16,48 +16,39 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for DESC FUNCTION commands.
+ * DDL task description for SHOW ROLE GRANT commands.
  */
-@Explain(displayName = "Describe Function", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class DescFunctionDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Show Role Grant", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowRoleGrantDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
-  }
+  public static final String SCHEMA =
+      "role,grant_option,grant_time,grantor#" +
+      "string:boolean:bigint:string";
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  private static final String SCHEMA = "tab_name#string";
-  public static String getSchema() {
-    return SCHEMA;
+  static {
+    DDLTask2.registerOperation(ShowRoleGrantDesc.class, ShowRoleGrantOperation.class);
   }
 
-  private final String resFile;
   private final String name;
-  private final boolean isExtended;
+  private final PrincipalType principalType;
+  private final String resFile;
 
-  public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
-    this.resFile = resFile.toString();
+  public ShowRoleGrantDesc(String name, PrincipalType principalType, String resFile) {
     this.name = name;
-    this.isExtended = isExtended;
-  }
-
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
-    return resFile;
+    this.principalType = principalType;
+    this.resFile = resFile;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -65,7 +56,11 @@ public class DescFunctionDesc implements DDLDesc, Serializable {
     return name;
   }
 
-  public boolean isExtended() {
-    return isExtended;
+  public PrincipalType getPrincipalType() {
+    return principalType;
+  }
+
+  public String getResFile() {
+    return resFile;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java
new file mode 100644
index 0000000..178ea8e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+
+/**
+ * Operation process of showing the role grants.
+ */
+public class ShowRoleGrantOperation extends DDLOperation {
+  private final ShowRoleGrantDesc desc;
+
+  public ShowRoleGrantOperation(DDLOperationContext context, ShowRoleGrantDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
+    List<HiveRoleGrant> roles = authorizer.getRoleGrantInfoForPrincipal(
+        AuthorizationUtils.getHivePrincipal(desc.getName(), desc.getPrincipalType()));
+    DDLUtils.writeToFile(writeRolesGrantedInfo(roles, testMode), desc.getResFile(), context);
+
+    return 0;
+  }
+
+  private String writeRolesGrantedInfo(List<HiveRoleGrant> roles, boolean testMode) {
+    if (roles == null || roles.isEmpty()) {
+      return "";
+    }
+    StringBuilder builder = new StringBuilder();
+    //sort the list to get sorted (deterministic) output (for ease of testing)
+    Collections.sort(roles);
+    for (HiveRoleGrant role : roles) {
+      DDLUtils.appendNonNull(builder, role.getRoleName(), true);
+      DDLUtils.appendNonNull(builder, role.isGrantOption());
+      DDLUtils.appendNonNull(builder, testMode ? -1 : role.getGrantTime() * 1000L);
+      DDLUtils.appendNonNull(builder, role.getGrantor());
+    }
+    return builder.toString();
+  }
+}
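
One detail shared by the ShowGrant, ShowPrincipals and ShowRoleGrant output: the grant time returned by the authorizer is multiplied by 1000L (seconds to milliseconds) for the result file, and under HiveConf.ConfVars.HIVE_IN_TEST it is pinned to -1 so q-file output stays deterministic. In isolation, the pattern looks like this (roleGrant stands for any of the per-row objects above):

    boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
    long reportedGrantTime = testMode ? -1 : roleGrant.getGrantTime() * 1000L;  // seconds -> milliseconds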
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
similarity index 58%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
index 8fa1ef1..36a0b1a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.table;
+package org.apache.hadoop.hive.ql.ddl.privilege;
 
 import java.io.Serializable;
 
@@ -26,36 +26,25 @@ import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for SHOW CREATE TABLE commands.
+ * DDL task description for SHOW ROLES commands.
  */
-@Explain(displayName = "Show Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class ShowCreateTableDesc implements DDLDesc, Serializable {
+@Explain(displayName = "Show Roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowRolesDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "role#string";
+
   static {
-    DDLTask2.registerOperation(ShowCreateTableDesc.class, ShowCreateTableOperation.class);
+    DDLTask2.registerOperation(ShowRolesDesc.class, ShowRolesOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of showcreatetable.
-   */
-  public static final String SCHEMA = "createtab_stmt#string";
-
   private final String resFile;
-  private final String tableName;
 
-  public ShowCreateTableDesc(String tableName, String resFile) {
-    this.tableName = tableName;
+  public ShowRolesDesc(String resFile) {
     this.resFile = resFile;
   }
 
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
-
-  @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTableName() {
-    return tableName;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java
new file mode 100644
index 0000000..22ca7f3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.privilege;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+
+/**
+ * Operation process of showing the roles.
+ */
+public class ShowRolesOperation extends DDLOperation {
+  private final ShowRolesDesc desc;
+
+  public ShowRolesOperation(DDLOperationContext context, ShowRolesDesc desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf());
+    List<String> allRoles = authorizer.getAllRoles();
+    RoleUtils.writeListToFileAfterSort(allRoles, desc.getResFile(), context);
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/package-info.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/package-info.java
new file mode 100644
index 0000000..596a803
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Privilege related DDL operation descriptions and operations. */
+package org.apache.hadoop.hive.ql.ddl.privilege;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java
index 0cfffd2..bb533c2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java
@@ -35,6 +35,14 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class DescTableDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  private static final String SCHEMA = "col_name,data_type,comment#string:string:string";
+  private static final String COL_STATS_SCHEMA = "col_name,data_type,min,max,num_nulls," +
+      "distinct_count,avg_col_len,max_col_len,num_trues,num_falses,bitVector,comment" +
+      "#string:string:string:string:string:string:string:string:string:string:string:string";
+  public static String getSchema(boolean colStats) {
+    return colStats ? COL_STATS_SCHEMA : SCHEMA;
+  }
+
   static {
     DDLTask2.registerOperation(DescTableDesc.class, DescTableOperation.class);
   }
@@ -82,19 +90,4 @@ public class DescTableDesc implements DDLDesc, Serializable {
   public boolean isFormatted() {
     return isFormatted;
   }
-
-  /**
-   * thrift ddl for the result of describe table.
-   */
-  private static final String SCHEMA = "col_name,data_type,comment#string:string:string";
-  private static final String COL_STATS_SCHEMA = "col_name,data_type,min,max,num_nulls,"
-      + "distinct_count,avg_col_len,max_col_len,num_trues,num_falses,bitVector,comment"
-      + "#string:string:string:string:string:string:string:string:string:string:string:string";
-
-  public static String getSchema(boolean colStats) {
-    if (colStats) {
-      return COL_STATS_SCHEMA;
-    }
-    return SCHEMA;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
index 8fa1ef1..a06f1fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java
@@ -32,15 +32,12 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowCreateTableDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "createtab_stmt#string";
+
   static {
     DDLTask2.registerOperation(ShowCreateTableDesc.class, ShowCreateTableOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of showcreatetable.
-   */
-  public static final String SCHEMA = "createtab_stmt#string";
-
   private final String resFile;
   private final String tableName;
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java
index 72caa58..7ba1c2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java
@@ -32,15 +32,12 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowTablePropertiesDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "prpt_name,prpt_value#string:string";
+
   static {
     DDLTask2.registerOperation(ShowTablePropertiesDesc.class, ShowTablePropertiesOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of showtblproperties.
-   */
-  public static final String SCHEMA = "prpt_name,prpt_value#string:string";
-
   private final String resFile;
   private final String tableName;
   private final String propertyName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java
index 8c312a0..6707350 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java
@@ -33,15 +33,12 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowTableStatusDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static final String SCHEMA = "tab_name#string";
+
   static {
     DDLTask2.registerOperation(ShowTableStatusDesc.class, ShowTableStatusOperation.class);
   }
 
-  /**
-   * Thrift ddl for the result of show tables.
-   */
-  public static final String SCHEMA = "tab_name#string";
-
   private final String resFile;
   private final String dbName;
   private final String pattern;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java
index 584433b..9ec3904 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java
@@ -34,25 +34,13 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class ShowTablesDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(ShowTablesDesc.class, ShowTablesOperation.class);
-  }
-
-  /**
-   * thrift ddl for the result of show tables and show views.
-   */
   private static final String TABLES_VIEWS_SCHEMA = "tab_name#string";
-
-  /**
-   * thrift ddl for the result of show extended tables.
-   */
   private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string";
+  private static final String MATERIALIZED_VIEWS_SCHEMA = "mv_name,rewrite_enabled,mode#string:string:string";
 
-  /**
-   * thrift ddl for the result of show tables.
-   */
-  private static final String MATERIALIZED_VIEWS_SCHEMA =
-      "mv_name,rewrite_enabled,mode#string:string:string";
+  static {
+    DDLTask2.registerOperation(ShowTablesDesc.class, ShowTablesOperation.class);
+  }
 
   private final String resFile;
   private final String dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 7f0eb40..13d7d6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.exec;
 import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.OutputStreamWriter;
 import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -44,7 +43,6 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
@@ -70,7 +68,6 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
 import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -149,8 +146,6 @@ import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMPoolDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.FileMergeDesc;
-import org.apache.hadoop.hive.ql.plan.GrantDesc;
-import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
 import org.apache.hadoop.hive.ql.plan.InsertCommitHookDesc;
 import org.apache.hadoop.hive.ql.plan.KillQueryDesc;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
@@ -159,18 +154,12 @@ import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.OrcFileMergeDesc;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.plan.RCFileMergeDesc;
 import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
 import org.apache.hadoop.hive.ql.plan.ReplRemoveFirstIncLoadPendFlagDesc;
-import org.apache.hadoop.hive.ql.plan.RevokeDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
-import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc;
@@ -178,17 +167,6 @@ import org.apache.hadoop.hive.ql.plan.ShowTxnsDesc;
 import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
-import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -231,7 +209,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
   private MetaDataFormatter formatter;
-  private final HiveAuthorizationTranslator defaultAuthorizationTranslator = new DefaultHiveAuthorizationTranslator();
 
   @Override
   public boolean requireLock() {
@@ -355,35 +332,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
         return showConf(db, showConf);
       }
 
-      RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc();
-      if (roleDDLDesc != null) {
-        return roleDDL(db, roleDDLDesc);
-      }
-
-      GrantDesc grantDesc = work.getGrantDesc();
-      if (grantDesc != null) {
-        return grantOrRevokePrivileges(db, grantDesc.getPrincipals(), grantDesc
-            .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(),
-            grantDesc.getGrantorType(), grantDesc.isGrantOption(), true);
-      }
-
-      RevokeDesc revokeDesc = work.getRevokeDesc();
-      if (revokeDesc != null) {
-        return grantOrRevokePrivileges(db, revokeDesc.getPrincipals(), revokeDesc
-            .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null,
-            revokeDesc.isGrantOption(), false);
-      }
-
-      ShowGrantDesc showGrantDesc = work.getShowGrantDesc();
-      if (showGrantDesc != null) {
-        return showGrants(db, showGrantDesc);
-      }
-
-      GrantRevokeRoleDDL grantOrRevokeRoleDDL = work.getGrantRevokeRoleDDL();
-      if (grantOrRevokeRoleDDL != null) {
-        return grantOrRevokeRole(db, grantOrRevokeRoleDDL);
-      }
-
       AlterTablePartMergeFilesDesc mergeFilesDesc = work.getMergeFilesDesc();
       if (mergeFilesDesc != null) {
         return mergeFiles(db, mergeFilesDesc, driverContext);
@@ -815,165 +763,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return ret;
   }
 
-  private HiveAuthorizer getSessionAuthorizer(Hive db) {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
-    if (authorizer == null) {
-      authorizer = new HiveV1Authorizer(conf, db);
-    }
-    return authorizer;
-  }
-
-  private int grantOrRevokeRole(Hive db, GrantRevokeRoleDDL grantOrRevokeRoleDDL)
-      throws HiveException {
-    HiveAuthorizer authorizer = getSessionAuthorizer(db);
-    //convert to the types needed for plugin api
-    HivePrincipal grantorPrinc = null;
-    if(grantOrRevokeRoleDDL.getGrantor() != null){
-      grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
-          AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
-    }
-    List<HivePrincipal> principals = AuthorizationUtils.getHivePrincipals(
-        grantOrRevokeRoleDDL.getPrincipalDesc(), getAuthorizationTranslator(authorizer));
-    List<String> roles = grantOrRevokeRoleDDL.getRoles();
-
-    boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
-    if (grantOrRevokeRoleDDL.getGrant()) {
-      authorizer.grantRole(principals, roles, grantOption, grantorPrinc);
-    } else {
-      authorizer.revokeRole(principals, roles, grantOption, grantorPrinc);
-    }
-    return 0;
-  }
-
-  private HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer)
-      throws HiveAuthzPluginException {
-    if (authorizer.getHiveAuthorizationTranslator() == null) {
-      return defaultAuthorizationTranslator;
-    } else {
-      return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator();
-    }
-  }
-
-  private int showGrants(Hive db, ShowGrantDesc showGrantDesc) throws HiveException {
-
-    HiveAuthorizer authorizer = getSessionAuthorizer(db);
-    try {
-      List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          getAuthorizationTranslator(authorizer).getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          getAuthorizationTranslator(authorizer).getHivePrivilegeObject(showGrantDesc.getHiveObj()));
-      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privInfos, testMode), showGrantDesc.getResFile());
-    } catch (IOException e) {
-      throw new HiveException("Error in show grant statement", e);
-    }
-    return 0;
-  }
-
-  private int grantOrRevokePrivileges(Hive db, List<PrincipalDesc> principals,
-      List<PrivilegeDesc> privileges, PrivilegeObjectDesc privSubjectDesc,
-      String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant)
-          throws HiveException {
-
-    HiveAuthorizer authorizer = getSessionAuthorizer(db);
-
-    //Convert to object types used by the authorization plugin interface
-    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(
-        principals, getAuthorizationTranslator(authorizer));
-    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(
-        privileges, getAuthorizationTranslator(authorizer));
-    HivePrivilegeObject hivePrivObject = getAuthorizationTranslator(authorizer)
-        .getHivePrivilegeObject(privSubjectDesc);
-
-    HivePrincipal grantorPrincipal = new HivePrincipal(
-        grantor, AuthorizationUtils.getHivePrincipalType(grantorType));
-
-    if(isGrant){
-      authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
-          grantorPrincipal, grantOption);
-    }else {
-      authorizer.revokePrivileges(hivePrincipals, hivePrivileges,
-          hivePrivObject, grantorPrincipal, grantOption);
-    }
-    //no exception thrown, so looks good
-    return 0;
-  }
-
-  private int roleDDL(Hive db, RoleDDLDesc roleDDLDesc) throws Exception {
-    HiveAuthorizer authorizer = getSessionAuthorizer(db);
-    RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
-    //call the appropriate hive authorizer function
-    switch(operation){
-    case CREATE_ROLE:
-      authorizer.createRole(roleDDLDesc.getName(), null);
-      break;
-    case DROP_ROLE:
-      authorizer.dropRole(roleDDLDesc.getName());
-      break;
-    case SHOW_ROLE_GRANT:
-      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      List<HiveRoleGrant> roles = authorizer.getRoleGrantInfoForPrincipal(
-          AuthorizationUtils.getHivePrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType()));
-      writeToFile(writeRolesGrantedInfo(roles, testMode), roleDDLDesc.getResFile());
-      break;
-    case SHOW_ROLES:
-      List<String> allRoles = authorizer.getAllRoles();
-      writeListToFileAfterSort(allRoles, roleDDLDesc.getResFile());
-      break;
-    case SHOW_CURRENT_ROLE:
-      List<String> roleNames = authorizer.getCurrentRoleNames();
-      writeListToFileAfterSort(roleNames, roleDDLDesc.getResFile());
-      break;
-    case SET_ROLE:
-      authorizer.setCurrentRole(roleDDLDesc.getName());
-      break;
-    case SHOW_ROLE_PRINCIPALS:
-      testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      List<HiveRoleGrant> roleGrants = authorizer.getPrincipalGrantInfoForRole(roleDDLDesc.getName());
-      writeToFile(writeHiveRoleGrantInfo(roleGrants, testMode), roleDDLDesc.getResFile());
-      break;
-    default:
-      throw new HiveException("Unkown role operation "
-          + operation.getOperationName());
-    }
-
-    return 0;
-  }
-
-  private String writeHiveRoleGrantInfo(List<HiveRoleGrant> roleGrants, boolean testMode) {
-    if (roleGrants == null || roleGrants.isEmpty()) {
-      return "";
-    }
-    StringBuilder builder = new StringBuilder();
-    // sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(roleGrants);
-    for (HiveRoleGrant roleGrant : roleGrants) {
-      // schema:
-      // principal_name,principal_type,grant_option,grantor,grantor_type,grant_time
-      appendNonNull(builder, roleGrant.getPrincipalName(), true);
-      appendNonNull(builder, roleGrant.getPrincipalType());
-      appendNonNull(builder, roleGrant.isGrantOption());
-      appendNonNull(builder, roleGrant.getGrantor());
-      appendNonNull(builder, roleGrant.getGrantorType());
-      appendNonNull(builder, testMode ? -1 : roleGrant.getGrantTime() * 1000L);
-    }
-    return builder.toString();
-  }
-
-  /**
-   * Write list of string entries into given file
-   * @param entries
-   * @param resFile
-   * @throws IOException
-   */
-  private void writeListToFileAfterSort(List<String> entries, String resFile) throws IOException {
-    Collections.sort(entries);
-    StringBuilder sb = new StringBuilder();
-    for(String entry : entries){
-      appendNonNull(sb, entry, true);
-    }
-    writeToFile(sb.toString(), resFile);
-  }
-
   /**
    * Add a partitions to a table.
    *
@@ -2319,93 +2108,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return 0;
   }
 
-  private void writeToFile(String data, String file) throws IOException {
-    Path resFile = new Path(file);
-    FileSystem fs = resFile.getFileSystem(conf);
-    FSDataOutputStream out = fs.create(resFile);
-    try {
-      if (data != null && !data.isEmpty()) {
-        OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8");
-        writer.write(data);
-        writer.write((char) terminator);
-        writer.flush();
-      }
-    } finally {
-      IOUtils.closeStream(out);
-    }
-  }
-
-  private String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) {
-    if (privileges == null || privileges.isEmpty()) {
-      return "";
-    }
-    StringBuilder builder = new StringBuilder();
-    //sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() {
-      @Override
-      public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) {
-        int compare = o1.getObject().compareTo(o2.getObject());
-        if (compare == 0) {
-          compare = o1.getPrincipal().compareTo(o2.getPrincipal());
-        }
-        if (compare == 0) {
-          compare = o1.getPrivilege().compareTo(o2.getPrivilege());
-        }
-        return compare;
-      }
-    });
-
-    for (HivePrivilegeInfo privilege : privileges) {
-      HivePrincipal principal = privilege.getPrincipal();
-      HivePrivilegeObject resource = privilege.getObject();
-      HivePrincipal grantor = privilege.getGrantorPrincipal();
-
-      appendNonNull(builder, resource.getDbname(), true);
-      appendNonNull(builder, resource.getObjectName());
-      appendNonNull(builder, resource.getPartKeys());
-      appendNonNull(builder, resource.getColumns());
-      appendNonNull(builder, principal.getName());
-      appendNonNull(builder, principal.getType());
-      appendNonNull(builder, privilege.getPrivilege().getName());
-      appendNonNull(builder, privilege.isGrantOption());
-      appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L);
-      appendNonNull(builder, grantor.getName());
-    }
-    return builder.toString();
-  }
-
-  private String writeRolesGrantedInfo(List<HiveRoleGrant> roles, boolean testMode) {
-    if (roles == null || roles.isEmpty()) {
-      return "";
-    }
-    StringBuilder builder = new StringBuilder();
-    //sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(roles);
-    for (HiveRoleGrant role : roles) {
-      appendNonNull(builder, role.getRoleName(), true);
-      appendNonNull(builder, role.isGrantOption());
-      appendNonNull(builder, testMode ? -1 : role.getGrantTime() * 1000L);
-      appendNonNull(builder, role.getGrantor());
-    }
-    return builder.toString();
-  }
-
-  private StringBuilder appendNonNull(StringBuilder builder, Object value) {
-    return appendNonNull(builder, value, false);
-  }
-
-  private StringBuilder appendNonNull(StringBuilder builder, Object value, boolean firstColumn) {
-    if (!firstColumn) {
-      builder.append((char)separator);
-    } else if (builder.length() > 0) {
-      builder.append((char)terminator);
-    }
-    if (value != null) {
-      builder.append(value);
-    }
-    return builder;
-  }
-
   /**
    * Alter a given table.
    *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index c892b40..3437895 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.ddl.DDLWork2;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.DatabaseEvent;
@@ -31,7 +32,6 @@ import org.apache.hadoop.hive.ql.exec.repl.util.TaskTracker;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.ReplLoadOpType;
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index d187d19..d2c3f7b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -83,6 +83,11 @@ import org.apache.hadoop.hive.ql.ddl.database.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowPrincipalsDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowRolesDesc;
 import org.apache.hadoop.hive.ql.ddl.table.DescTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.DropTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.LockTableDesc;
@@ -157,13 +162,10 @@ import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
-import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc;
@@ -666,20 +668,18 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
     switch (ast.getChildCount()) {
-      case 0:
-        ctx.setResFile(ctx.getLocalTmpPath());
-        rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
-        getInputs(), getOutputs(), ctx.getResFile()));
-        setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
-        break;
-      case 1:
-        rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
-        BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
-        getInputs(), getOutputs()));
-        break;
-      default:
-        throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. "
-        + ast.dump());
+    case 0:
+      ctx.setResFile(ctx.getLocalTmpPath());
+      rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
+          getInputs(), getOutputs(), ctx.getResFile()));
+      setFetchTask(createFetchTask(ShowRolesDesc.SCHEMA));
+      break;
+    case 1:
+      rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
+          BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()), getInputs(), getOutputs()));
+      break;
+    default:
+      throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. " + ast.dump());
     }
   }
 
@@ -700,7 +700,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         createShowGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
     if(task != null) {
       rootTasks.add(task);
-      setFetchTask(createFetchTask(ShowGrantDesc.getSchema()));
+      setFetchTask(createFetchTask(ShowGrantDesc.SCHEMA));
     }
   }
 
@@ -741,17 +741,17 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         createShowRoleGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
     if(task != null) {
       rootTasks.add(task);
-      setFetchTask(createFetchTask(RoleDDLDesc.getRoleShowGrantSchema()));
+      setFetchTask(createFetchTask(ShowRoleGrantDesc.SCHEMA));
     }
   }
 
   private void analyzeShowRolePrincipals(ASTNode ast) throws SemanticException {
-    Task<DDLWork> roleDDLTask = (Task<DDLWork>) hiveAuthorizationTaskFactory
+    Task<?> roleDDLTask = (Task<?>) hiveAuthorizationTaskFactory
         .createShowRolePrincipalsTask(ast, ctx.getResFile(), getInputs(), getOutputs());
 
     if (roleDDLTask != null) {
       rootTasks.add(roleDDLTask);
-      setFetchTask(createFetchTask(RoleDDLDesc.getShowRolePrincipalsSchema()));
+      setFetchTask(createFetchTask(ShowPrincipalsDesc.SCHEMA));
     }
   }
 
@@ -762,7 +762,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     if (roleDDLTask != null) {
       rootTasks.add(roleDDLTask);
-      setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
+      setFetchTask(createFetchTask(ShowRolesDesc.SCHEMA));
     }
   }
 
@@ -1605,7 +1605,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc,
             partSpec == null ? new HashMap<>() : partSpec);
         ltd.setLbCtx(lbCtx);
-        @SuppressWarnings("unchecked")
         Task<MoveWork> moveTsk =
             TaskFactory.get(new MoveWork(null, null, ltd, null, false));
         truncateTask.addDependentTask(moveTsk);
@@ -2801,7 +2800,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile());
     }
     rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showFuncsDesc)));
-    setFetchTask(createFetchTask(ShowFunctionsDesc.getSchema()));
+    setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA));
   }
 
   /**
@@ -3154,7 +3153,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended);
     rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descFuncDesc)));
-    setFetchTask(createFetchTask(DescFunctionDesc.getSchema()));
+    setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA));
   }
 
 
@@ -4111,7 +4110,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
      * Throw an error if the user tries to use the DDL with
      * hive.internal.ddl.list.bucketing.enable set to false.
      */
-    HiveConf hiveConf = SessionState.get().getConf();
+    SessionState.get().getConf();
 
     Table tab = getTable(qualified);
 
@@ -4255,7 +4254,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
      * Throw an error if the user tries to use the DDL with
      * hive.internal.ddl.list.bucketing.enable set to false.
      */
-    HiveConf hiveConf = SessionState.get().getConf();
+    SessionState.get().getConf();
     /**
      * Retrieve mappings from parser
      */
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java
index de5c907..c8f1246 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java
@@ -21,10 +21,10 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 
 /**
  * Utility functions for creating objects relevant for authorization operations
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
index 18ed6fb..61b60680 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
@@ -27,6 +27,22 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.privilege.CreateRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.DropRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.GrantRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.RevokeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.RevokeRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.SetRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowCurrentRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowPrincipalsDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowRolesDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -40,16 +56,6 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.GrantDesc;
-import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
-import org.apache.hadoop.hive.ql.plan.RevokeDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc.RoleOperation;
-import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
@@ -70,15 +76,15 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
   public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) {
     String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
-    RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null);
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
+    CreateRoleDesc createRoleDesc = new CreateRoleDesc(roleName);
+    return TaskFactory.get(new DDLWork2(inputs, outputs, createRoleDesc));
   }
   @Override
   public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) {
     String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
-    RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null);
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
+    DropRoleDesc dropRoleDesc = new DropRoleDesc(roleName);
+    return TaskFactory.get(new DDLWork2(inputs, outputs, dropRoleDesc));
   }
   @Override
   public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
@@ -97,10 +103,8 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       break;
     }
     String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
-    RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType,
-        RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
-    roleDesc.setResFile(resultFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
+    ShowRoleGrantDesc showRoleGrantDesc = new ShowRoleGrantDesc(principalName, principalType, resultFile.toString());
+    return TaskFactory.get(new DDLWork2(inputs, outputs, showRoleGrantDesc));
   }
   @Override
   public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
@@ -127,7 +131,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
         principalDesc, userName, PrincipalType.USER, grantOption);
-    return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
+    return TaskFactory.get(new DDLWork2(inputs, outputs, grantDesc));
   }
 
   @Override
@@ -146,12 +150,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       }
     }
     RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
-    return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
-  }
-  @Override
-  public Task<? extends Serializable> createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
-      HashSet<WriteEntity> outputs) {
-    return analyzeGrantRevokeRole(true, ast, inputs, outputs);
+    return TaskFactory.get(new DDLWork2(inputs, outputs, revokeDesc));
   }
   @Override
   public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs,
@@ -171,17 +170,20 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     if (param != null) {
       if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
-        privHiveObj = new PrivilegeObjectDesc();
+        privHiveObj = new PrivilegeObjectDesc(true, null, null, null);
       } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
         privHiveObj = parsePrivObject(param);
       }
     }
 
-    ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
-        principalDesc, privHiveObj);
-    return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
+    ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj);
+    return TaskFactory.get(new DDLWork2(inputs, outputs, showGrant));
+  }
+  @Override
+  public Task<? extends Serializable> createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
+      HashSet<WriteEntity> outputs) {
+    return analyzeGrantRevokeRole(true, ast, inputs, outputs);
   }
-
   @Override
   public Task<? extends Serializable> createRevokeRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) {
@@ -211,10 +213,13 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
 
     //until change is made to use the admin option. Default to false with V2 authorization
 
-
-    GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
-        roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
-    return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
+    if (isGrant) {
+      GrantRoleDesc grantRoleDesc = new GrantRoleDesc(roles, principalDesc, roleOwnerName, isAdmin);
+      return TaskFactory.get(new DDLWork2(inputs, outputs, grantRoleDesc));
+    } else {
+      RevokeRoleDesc revokeRoleDesc = new RevokeRoleDesc(roles, principalDesc, roleOwnerName, isAdmin);
+      return TaskFactory.get(new DDLWork2(inputs, outputs, revokeRoleDesc));
+    }
   }
 
   private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast,
@@ -237,29 +242,33 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
   }
 
   protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
-    PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
+    boolean isTable;
+    String object = null;
+    Map<String, String> partSpec = null;
+    List<String> columns = null;
+
     ASTNode child = (ASTNode) ast.getChild(0);
     ASTNode gchild = (ASTNode)child.getChild(0);
     if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
-      subject.setTable(true);
+      isTable = true;
       String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
-      subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
+      object = BaseSemanticAnalyzer.getDotName(qualified);
     } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) {
       throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
     } else {
-      subject.setTable(false);
-      subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
+      isTable = false;
+      object = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText());
     }
     //if partition spec node is present, set partition spec
     for (int i = 1; i < child.getChildCount(); i++) {
       gchild = (ASTNode) child.getChild(i);
       if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
-        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
+        partSpec = DDLSemanticAnalyzer.getPartSpec(gchild);
       } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
-        subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
+        columns = BaseSemanticAnalyzer.getColumnNames(gchild);
       }
     }
-    return subject;
+    return new PrivilegeObjectDesc(isTable, object, partSpec, columns);
   }
 
   private List<PrivilegeDesc> analyzePrivilegeListDef(ASTNode node)
@@ -329,17 +338,16 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
   public Task<? extends Serializable> createSetRoleTask(String roleName,
       HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs)
       throws SemanticException {
-    return TaskFactory.get(new DDLWork(inputs, outputs, new RoleDDLDesc(roleName, PrincipalType.ROLE,
-        RoleDDLDesc.RoleOperation.SET_ROLE, null)));
+    SetRoleDesc setRoleDesc = new SetRoleDesc(roleName);
+    return TaskFactory.get(new DDLWork2(inputs, outputs, setRoleDesc));
   }
 
   @Override
   public Task<? extends Serializable> createShowCurrentRoleTask(
       HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, Path resFile)
       throws SemanticException {
-    RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
-    ddlDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc));
+    ShowCurrentRoleDesc showCurrentRoleDesc = new ShowCurrentRoleDesc(resFile.toString());
+    return TaskFactory.get(new DDLWork2(inputs, outputs, showCurrentRoleDesc));
   }
 
   @Override
@@ -354,19 +362,15 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
       throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS");
     }
 
-    RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE,
-     RoleOperation.SHOW_ROLE_PRINCIPALS, null);
-    roleDDLDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc));
+    ShowPrincipalsDesc showPrincipalsDesc = new ShowPrincipalsDesc(roleName, resFile.toString());
+    return TaskFactory.get(new DDLWork2(inputs, outputs, showPrincipalsDesc));
   }
 
   @Override
   public Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile,
       HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
-    RoleDDLDesc showRolesDesc = new RoleDDLDesc(null, null, RoleDDLDesc.RoleOperation.SHOW_ROLES,
-        null);
-    showRolesDesc.setResFile(resFile.toString());
-    return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc));
+    ShowRolesDesc showRolesDesc = new ShowRolesDesc(resFile.toString());
+    return TaskFactory.get(new DDLWork2(inputs, outputs, showRolesDesc));
   }
 
 }
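
The factory above now wires each statement to its own small descriptor and wraps it in DDLWork2, instead of funneling everything through RoleDDLDesc/DDLWork. The matching execution classes are not part of this excerpt; purely as an illustration of the pattern (the class name and method shape below are assumptions, not the committed code; SessionState, HiveAuthorizer and CreateRoleDesc.getName() are real), a handler for CreateRoleDesc could look roughly like this:

// Illustrative sketch only -- the committed operation classes are not shown in this diff.
package org.apache.hadoop.hive.ql.ddl.privilege;

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.session.SessionState;

public class CreateRoleSketch {
  private final CreateRoleDesc desc;

  public CreateRoleSketch(CreateRoleDesc desc) {
    this.desc = desc;
  }

  public int execute() throws Exception {
    // Role DDL is only meaningful with a V2 authorizer configured for the session.
    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
    if (authorizer == null) {
      throw new HiveException("Authorization V2 is not enabled; CREATE ROLE is unsupported");
    }
    // CREATE ROLE is delegated to the configured authorization plugin.
    authorizer.createRole(desc.getName(), null);
    return 0;
  }
}
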
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
index 7162375..edeaaa2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
@@ -22,13 +22,13 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
 import org.apache.hadoop.hive.ql.ddl.DDLWork2;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 
 import java.io.Serializable;
 import java.util.Collections;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
index 41b6db6..b81aa2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
@@ -26,12 +26,12 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.ddl.DDLWork2;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 
 import java.io.IOException;
 import java.io.Serializable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
index d70353e..8603521 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index c3863e1..e6f3a6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -50,12 +50,6 @@ public class DDLWork implements Serializable {
   private AlterTableExchangePartition alterTableExchangePartition;
   private KillQueryDesc killQueryDesc;
 
-  private RoleDDLDesc roleDDLDesc;
-  private GrantDesc grantDesc;
-  private ShowGrantDesc showGrantDesc;
-  private RevokeDesc revokeDesc;
-  private GrantRevokeRoleDDL grantRevokeRoleDDL;
-
   private ShowConfDesc showConfDesc;
 
   private CreateResourcePlanDesc createResourcePlanDesc;
@@ -211,36 +205,6 @@ public class DDLWork implements Serializable {
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      RoleDDLDesc roleDDLDesc) {
-    this(inputs, outputs);
-    this.roleDDLDesc = roleDDLDesc;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      GrantDesc grantDesc) {
-    this(inputs, outputs);
-    this.grantDesc = grantDesc;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      ShowGrantDesc showGrant) {
-    this(inputs, outputs);
-    this.showGrantDesc = showGrant;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      RevokeDesc revokeDesc) {
-    this(inputs, outputs);
-    this.revokeDesc = revokeDesc;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      GrantRevokeRoleDDL grantRevokeRoleDDL) {
-    this(inputs, outputs);
-    this.grantRevokeRoleDDL = grantRevokeRoleDDL;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       AlterTablePartMergeFilesDesc mergeDesc) {
     this(inputs, outputs);
     this.mergeFilesDesc = mergeDesc;
@@ -453,35 +417,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @return role ddl desc
-   */
-  public RoleDDLDesc getRoleDDLDesc() {
-    return roleDDLDesc;
-  }
-
-  /**
-   * @return grant desc
-   */
-  public GrantDesc getGrantDesc() {
-    return grantDesc;
-  }
-
-  /**
-   * @return show grant desc
-   */
-  public ShowGrantDesc getShowGrantDesc() {
-    return showGrantDesc;
-  }
-
-  public RevokeDesc getRevokeDesc() {
-    return revokeDesc;
-  }
-
-  public GrantRevokeRoleDDL getGrantRevokeRoleDDL() {
-    return grantRevokeRoleDDL;
-  }
-
-  /**
    * @return descriptor for merging files
    */
   public AlterTablePartMergeFilesDesc getMergeFilesDesc() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java
deleted file mode 100644
index 07529d9..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.util.List;
-
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-
-@Explain(displayName="grant or revoke roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class GrantRevokeRoleDDL {
-
-  private boolean grant;
-
-  private List<PrincipalDesc> principalDesc;
-
-  private List<String> roles;
-
-  private String grantor;
-
-  private PrincipalType grantorType;
-
-  private boolean grantOption;
-
-  public GrantRevokeRoleDDL() {
-  }
-
-  public GrantRevokeRoleDDL(boolean grant, List<String> roles,
-      List<PrincipalDesc> principalDesc, String grantor,
-      PrincipalType grantorType, boolean grantOption) {
-    super();
-    this.grant = grant;
-    this.principalDesc = principalDesc;
-    this.roles = roles;
-    this.grantor = grantor;
-    this.grantorType = grantorType;
-    this.grantOption = grantOption;
-  }
-
-  /**
-   * @return grant or revoke privileges
-   */
-  @Explain(displayName="grant (or revoke)", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public boolean getGrant() {
-    return grant;
-  }
-
-  public void setGrant(boolean grant) {
-    this.grant = grant;
-  }
-
-  /**
-   * @return a list of principals
-   */
-  @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public List<PrincipalDesc> getPrincipalDesc() {
-    return principalDesc;
-  }
-
-  public void setPrincipalDesc(List<PrincipalDesc> principalDesc) {
-    this.principalDesc = principalDesc;
-  }
-
-  /**
-   * @return a list of roles
-   */
-  @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public List<String> getRoles() {
-    return roles;
-  }
-
-  public void setRoles(List<String> roles) {
-    this.roles = roles;
-  }
-
-  public String getGrantor() {
-    return grantor;
-  }
-
-  public void setGrantor(String grantor) {
-    this.grantor = grantor;
-  }
-
-  public PrincipalType getGrantorType() {
-    return grantorType;
-  }
-
-  public void setGrantorType(PrincipalType grantorType) {
-    this.grantorType = grantorType;
-  }
-
-  public boolean isGrantOption() {
-    return grantOption;
-  }
-
-  public void setGrantOption(boolean grantOption) {
-    this.grantOption = grantOption;
-  }
-
-}
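
With GrantRevokeRoleDDL deleted, the grant/revoke direction is no longer a mutable boolean on a shared bean but is encoded in the descriptor type itself (GrantRoleDesc vs. RevokeRoleDesc). Inferred from the constructor call new GrantRoleDesc(roles, principalDesc, roleOwnerName, isAdmin) and from grantDesc.isGrantOption() in the tests, the new descriptor plausibly reduces to an immutable value holder along these lines (getter names other than isGrantOption are assumptions):

// Plausible shape only; not the committed class.
package org.apache.hadoop.hive.ql.ddl.privilege;

import java.io.Serializable;
import java.util.List;

public class GrantRoleDescSketch implements Serializable {
  private static final long serialVersionUID = 1L;

  private final List<String> roles;
  private final List<PrincipalDesc> principals;
  private final String grantor;
  private final boolean grantOption;

  public GrantRoleDescSketch(List<String> roles, List<PrincipalDesc> principals,
      String grantor, boolean grantOption) {
    this.roles = roles;
    this.principals = principals;
    this.grantor = grantor;
    this.grantOption = grantOption;
  }

  public List<String> getRoles() { return roles; }
  public List<PrincipalDesc> getPrincipals() { return principals; }
  public String getGrantor() { return grantor; }
  public boolean isGrantOption() { return grantOption; }
}
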
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
deleted file mode 100644
index afe7faf..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-
-@Explain(displayName = "Create Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class RoleDDLDesc extends DDLDesc implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  private String name;
-
-  private PrincipalType principalType;
-
-  private boolean group;
-
-  private RoleOperation operation;
-
-  private String resFile;
-
-  private String roleOwnerName;
-
-  /**
-   * thrift ddl for the result of show roles.
-   */
-  private static final String roleNameSchema = "role#string";
-
-  /**
-   * thrift ddl for the result of show role grant principalName
-   */
-  private static final String roleShowGrantSchema =
-      "role,grant_option,grant_time,grantor#" +
-      "string:boolean:bigint:string";
-
-  /**
-   * thrift ddl for the result of describe role roleName
-   */
-  private static final String roleShowRolePrincipals =
-      "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" +
-      "string:string:boolean:string:string:bigint";
-
-  public static String getRoleNameSchema() {
-    return roleNameSchema;
-  }
-
-  public static String getRoleShowGrantSchema() {
-    return roleShowGrantSchema;
-  }
-
-  public static String getShowRolePrincipalsSchema() {
-    return roleShowRolePrincipals;
-  }
-
-  public static enum RoleOperation {
-    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"),
-    SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role"),
-    SHOW_ROLE_PRINCIPALS("show_role_principals");
-    private String operationName;
-
-    private RoleOperation() {
-    }
-
-    private RoleOperation(String operationName) {
-      this.operationName = operationName;
-    }
-
-    public String getOperationName() {
-      return operationName;
-    }
-
-    @Override
-    public String toString () {
-      return this.operationName;
-    }
-  }
-
-  public RoleDDLDesc(){
-  }
-
-  public RoleDDLDesc(String roleName, RoleOperation operation) {
-    this(roleName, PrincipalType.USER, operation, null);
-  }
-
-  public RoleDDLDesc(String principalName, PrincipalType principalType,
-      RoleOperation operation, String roleOwnerName) {
-    this.name = principalName;
-    this.principalType = principalType;
-    this.operation = operation;
-    this.roleOwnerName = roleOwnerName;
-  }
-
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getName() {
-    return name;
-  }
-
-  @Explain(displayName = "role operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public RoleOperation getOperation() {
-    return operation;
-  }
-
-  public void setOperation(RoleOperation operation) {
-    this.operation = operation;
-  }
-
-  public PrincipalType getPrincipalType() {
-    return principalType;
-  }
-
-  public void setPrincipalType(PrincipalType principalType) {
-    this.principalType = principalType;
-  }
-
-  public boolean getGroup() {
-    return group;
-  }
-
-  public void setGroup(boolean group) {
-    this.group = group;
-  }
-
-  public String getResFile() {
-    return resFile;
-  }
-
-  public void setResFile(String resFile) {
-    this.resFile = resFile;
-  }
-
-  public String getRoleOwnerName() {
-    return roleOwnerName;
-  }
-
-  public void setRoleOwnerName(String roleOwnerName) {
-    this.roleOwnerName = roleOwnerName;
-  }
-
-}
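
The deleted RoleDDLDesc also carried the result-set schemas for the SHOW statements as private strings behind getters; DDLSemanticAnalyzer now reads public SCHEMA constants on the dedicated descriptors (ShowRolesDesc.SCHEMA, ShowRoleGrantDesc.SCHEMA, ShowPrincipalsDesc.SCHEMA). The constant values below are taken from the strings RoleDDLDesc used to return; whether the new classes declare them verbatim like this is an assumption:

// Sketch of the schema-constant pattern; grouping them in one class is illustrative only.
package org.apache.hadoop.hive.ql.ddl.privilege;

public final class ShowRoleSchemasSketch {
  private ShowRoleSchemasSketch() {
  }

  // Formerly RoleDDLDesc.getRoleNameSchema(), consumed by SHOW ROLES / SHOW CURRENT ROLES.
  public static final String SHOW_ROLES_SCHEMA = "role#string";

  // Formerly RoleDDLDesc.getRoleShowGrantSchema(), consumed by SHOW ROLE GRANT.
  public static final String SHOW_ROLE_GRANT_SCHEMA =
      "role,grant_option,grant_time,grantor#string:boolean:bigint:string";

  // Formerly RoleDDLDesc.getShowRolePrincipalsSchema(), consumed by SHOW PRINCIPALS.
  public static final String SHOW_PRINCIPALS_SCHEMA =
      "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#"
          + "string:string:boolean:string:string:bigint";
}
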
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
index f690422..fbf8189 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
@@ -27,15 +27,15 @@ import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
index 853dcf8..537b9de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hive.ql.security.authorization;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
index 1e9c639..a8afcd6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
@@ -22,6 +22,9 @@ import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * Privilege defines a privilege in Hive. Each privilege has a name and scope associated with it.
  * This class contains all of the predefined privileges in Hive.
@@ -44,6 +47,7 @@ public class Privilege {
 
   }
 
+  @Explain(skipHeader = true, explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public PrivilegeType getPriv() {
     return priv;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
index 7037f2c..4dd67f4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
@@ -22,6 +22,8 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * Privilege type
@@ -49,6 +51,7 @@ public enum PrivilegeType {
   }
 
   @Override
+  @Explain(displayName = "type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String toString(){
     return name == null ? "unknown" : name;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
index 29ce9ed..a0e5d66 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
@@ -19,10 +19,10 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 
 /**
  * This interface has functions that provide the ability to customize the translation
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
index fed0d01..68f7380 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
@@ -21,14 +21,13 @@ import java.util.List;
 
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork2;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
-
 import org.junit.Assert;
 
 /**
@@ -36,31 +35,15 @@ import org.junit.Assert;
  */
 public class AuthorizationTestUtil {
 
-  /**
-   * Create DDLWork from given ast
-   * @param ast
-   * @param conf
-   * @param db
-   * @return
-   * @throws Exception
-   */
-  public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
+  public static DDLWork2 analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
     DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
     SessionState.start(queryState.getConf());
     analyzer.analyze(ast, new Context(queryState.getConf()));
     List<Task<?>> rootTasks = analyzer.getRootTasks();
-    return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
+    return (DDLWork2) inList(rootTasks).ofSize(1).get(0).getWork();
   }
 
-  /**
-   * Create DDLWork from given command string
-   * @param command
-   * @param conf
-   * @param db
-   * @return
-   * @throws Exception
-   */
-  public static DDLWork analyze(String command, QueryState queryState, Hive db) throws Exception {
+  public static DDLWork2 analyze(String command, QueryState queryState, Hive db) throws Exception {
     return analyze(parse(command), queryState, db);
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
index a15fd5d..b194cc3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java
@@ -17,17 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.parse.authorization;
 
-import org.junit.Assert;
-
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.GrantDesc;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
+import org.junit.Assert;
 
 public class PrivilegesTestBase {
   protected static final String DB = "default";
@@ -37,8 +35,9 @@ public class PrivilegesTestBase {
 
   public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db)
       throws Exception {
-    DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db);
-    GrantDesc grantDesc = work.getGrantDesc();
+    DDLWork2 work = AuthorizationTestUtil.analyze(
+        "GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db);
+    GrantDesc grantDesc = (GrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
 
     //check privileges
@@ -51,8 +50,8 @@ public class PrivilegesTestBase {
       Assert.assertEquals(PrincipalType.USER, principal.getType());
       Assert.assertEquals(USER, principal.getName());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
 
 }
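
The updated tests all follow the same retrieval pattern: DDLWork2 exposes a single getDDLDesc(), which each test casts to the descriptor type it expects. A small generic helper like the one below (hypothetical, not part of this patch) would keep the null check, the type check and the cast in one place:

// Illustrative test helper only; DDLWork2.getDDLDesc() is the real API, the rest is assumed.
import org.apache.hadoop.hive.ql.ddl.DDLWork2;
import org.junit.Assert;

public final class DescAssert {
  private DescAssert() {
  }

  public static <T> T descOf(DDLWork2 work, Class<T> expected) {
    Object desc = work.getDDLDesc();
    Assert.assertNotNull("Expected a DDL descriptor", desc);
    Assert.assertTrue("Expected " + expected.getSimpleName() + " but got " + desc.getClass(),
        expected.isInstance(desc));
    return expected.cast(desc);
  }
}
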
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 9a8c032..e7a1bd6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -24,26 +24,25 @@ import org.junit.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.privilege.CreateRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.DropRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.GrantRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.RevokeDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.RevokeRoleDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.GrantDesc;
-import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
-import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
-import org.apache.hadoop.hive.ql.plan.RevokeDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
-import org.apache.hadoop.hive.ql.plan.RoleDDLDesc.RoleOperation;
-import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -88,10 +87,7 @@ public class TestHiveAuthorizationTaskFactory {
   private static final String ROLE = "role1";
   private static final String USER = "user1";
 
-  private ParseDriver parseDriver;
-  private DDLSemanticAnalyzer analyzer;
   private QueryState queryState;
-  private Context context;
   private String currentUser;
   private Hive db;
   private Table table;
@@ -110,9 +106,6 @@ public class TestHiveAuthorizationTaskFactory {
     table = new Table(DB, TABLE);
     partition = new Partition(table);
     SessionState.start(conf);
-    context = new Context(conf);
-    parseDriver = new ParseDriver();
-    analyzer = new DDLSemanticAnalyzer(queryState, db);
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
@@ -128,11 +121,9 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testCreateRole() throws Exception {
-    DDLWork work = analyze("CREATE ROLE " + ROLE);
-    RoleDDLDesc roleDesc = work.getRoleDDLDesc();
+    DDLWork2 work = analyze("CREATE ROLE " + ROLE);
+    CreateRoleDesc roleDesc = (CreateRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Role should not be null", roleDesc);
-    Assert.assertEquals(RoleOperation.CREATE_ROLE, roleDesc.getOperation());
-    Assert.assertFalse("Did not expect a group", roleDesc.getGroup());
     Assert.assertEquals(ROLE, roleDesc.getName());
   }
   /**
@@ -140,11 +131,9 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testDropRole() throws Exception {
-    DDLWork work = analyze("DROp ROLE " + ROLE);
-    RoleDDLDesc roleDesc = work.getRoleDDLDesc();
+    DDLWork2 work = analyze("DROp ROLE " + ROLE);
+    DropRoleDesc roleDesc = (DropRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Role should not be null", roleDesc);
-    Assert.assertEquals(RoleOperation.DROP_ROLE, roleDesc.getOperation());
-    Assert.assertFalse("Did not expect a group", roleDesc.getGroup());
     Assert.assertEquals(ROLE, roleDesc.getName());
   }
   /**
@@ -152,8 +141,8 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testGrantUserTable() throws Exception {
-    DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER);
-    GrantDesc grantDesc = work.getGrantDesc();
+    DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER);
+    GrantDesc grantDesc = (GrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.USER, principal.getType());
@@ -162,16 +151,16 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * GRANT ... ON TABLE ... TO ROLE ...
    */
   @Test
   public void testGrantRoleTable() throws Exception {
-    DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE);
-    GrantDesc grantDesc = work.getGrantDesc();
+    DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE);
+    GrantDesc grantDesc = (GrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.ROLE, principal.getType());
@@ -180,16 +169,16 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * GRANT ... ON TABLE ... TO GROUP ...
    */
   @Test
   public void testGrantGroupTable() throws Exception {
-    DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP);
-    GrantDesc grantDesc = work.getGrantDesc();
+    DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP);
+    GrantDesc grantDesc = (GrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.GROUP, principal.getType());
@@ -198,16 +187,16 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * REVOKE ... ON TABLE ... FROM USER ...
    */
   @Test
   public void testRevokeUserTable() throws Exception {
-    DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER);
-    RevokeDesc grantDesc = work.getRevokeDesc();
+    DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER);
+    RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc();
     Assert.assertNotNull("Revoke should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.USER, principal.getType());
@@ -216,16 +205,16 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * REVOKE ... ON TABLE ... FROM ROLE ...
    */
   @Test
   public void testRevokeRoleTable() throws Exception {
-    DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM ROLE " + ROLE);
-    RevokeDesc grantDesc = work.getRevokeDesc();
+    DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM ROLE " + ROLE);
+    RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc();
     Assert.assertNotNull("Revoke should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.ROLE, principal.getType());
@@ -234,16 +223,16 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * REVOKE ... ON TABLE ... FROM GROUP ...
    */
   @Test
   public void testRevokeGroupTable() throws Exception {
-    DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP);
-    RevokeDesc grantDesc = work.getRevokeDesc();
+    DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP);
+    RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc();
     Assert.assertNotNull("Revoke should not be null", grantDesc);
     for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.GROUP, principal.getType());
@@ -252,25 +241,23 @@ public class TestHiveAuthorizationTaskFactory {
     for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) {
       Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
     }
-    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable());
-    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject());
+    Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable());
+    Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject());
   }
   /**
    * GRANT ROLE ... TO USER ...
    */
   @Test
   public void testGrantRoleUser() throws Exception {
-    DDLWork work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER);
+    GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertTrue("Expected grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.USER, principal.getType());
       Assert.assertEquals(USER, principal.getName());
     }
@@ -280,17 +267,15 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testGrantRoleRole() throws Exception {
-    DDLWork work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE);
+    GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertTrue("Expected grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.ROLE, principal.getType());
       Assert.assertEquals(ROLE, principal.getName());
     }
@@ -300,17 +285,15 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testGrantRoleGroup() throws Exception {
-    DDLWork work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP);
+    GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertTrue("Expected grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.GROUP, principal.getType());
       Assert.assertEquals(GROUP, principal.getName());
     }
@@ -320,17 +303,15 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testRevokeRoleUser() throws Exception {
-    DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER);
+    RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertFalse("Did not expect grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.USER, principal.getType());
       Assert.assertEquals(USER, principal.getName());
     }
@@ -340,17 +321,15 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testRevokeRoleRole() throws Exception {
-    DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE);
+    RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertFalse("Did not expect grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.ROLE, principal.getType());
       Assert.assertEquals(ROLE, principal.getName());
     }
@@ -360,17 +339,15 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testRevokeRoleGroup() throws Exception {
-    DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP);
-    GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL();
+    DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP);
+    RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc();
     Assert.assertNotNull("Grant should not be null", grantDesc);
-    Assert.assertFalse("Did not expect grant ", grantDesc.getGrant());
     Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption());
     Assert.assertEquals(currentUser, grantDesc.getGrantor());
-    Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType());
     for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) {
       Assert.assertEquals(ROLE, role);
     }
-    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) {
+    for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) {
       Assert.assertEquals(PrincipalType.GROUP, principal.getType());
       Assert.assertEquals(GROUP, principal.getName());
     }
@@ -380,10 +357,9 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowRoleGrantUser() throws Exception {
-    DDLWork work = analyze("SHOW ROLE GRANT USER " + USER);
-    RoleDDLDesc roleDesc = work.getRoleDDLDesc();
+    DDLWork2 work = analyze("SHOW ROLE GRANT USER " + USER);
+    ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Role should not be null", roleDesc);
-    Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation());
     Assert.assertEquals(PrincipalType.USER, roleDesc.getPrincipalType());
     Assert.assertEquals(USER, roleDesc.getName());
   }
@@ -392,10 +368,9 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowRoleGrantRole() throws Exception {
-    DDLWork work = analyze("SHOW ROLE GRANT ROLE " + ROLE);
-    RoleDDLDesc roleDesc = work.getRoleDDLDesc();
+    DDLWork2 work = analyze("SHOW ROLE GRANT ROLE " + ROLE);
+    ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Role should not be null", roleDesc);
-    Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation());
     Assert.assertEquals(PrincipalType.ROLE, roleDesc.getPrincipalType());
     Assert.assertEquals(ROLE, roleDesc.getName());
   }
@@ -404,10 +379,9 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowRoleGrantGroup() throws Exception {
-    DDLWork work = analyze("SHOW ROLE GRANT GROUP " + GROUP);
-    RoleDDLDesc roleDesc = work.getRoleDDLDesc();
+    DDLWork2 work = analyze("SHOW ROLE GRANT GROUP " + GROUP);
+    ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Role should not be null", roleDesc);
-    Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation());
     Assert.assertEquals(PrincipalType.GROUP, roleDesc.getPrincipalType());
     Assert.assertEquals(GROUP, roleDesc.getName());
   }
@@ -416,8 +390,8 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowGrantUserOnTable() throws Exception {
-    DDLWork work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE);
-    ShowGrantDesc grantDesc = work.getShowGrantDesc();
+    DDLWork2 work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE);
+    ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Show grant should not be null", grantDesc);
     Assert.assertEquals(PrincipalType.USER, grantDesc.getPrincipalDesc().getType());
     Assert.assertEquals(USER, grantDesc.getPrincipalDesc().getName());
@@ -430,8 +404,8 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowGrantRoleOnTable() throws Exception {
-    DDLWork work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE);
-    ShowGrantDesc grantDesc = work.getShowGrantDesc();
+    DDLWork2 work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE);
+    ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Show grant should not be null", grantDesc);
     Assert.assertEquals(PrincipalType.ROLE, grantDesc.getPrincipalDesc().getType());
     Assert.assertEquals(ROLE, grantDesc.getPrincipalDesc().getName());
@@ -444,8 +418,8 @@ public class TestHiveAuthorizationTaskFactory {
    */
   @Test
   public void testShowGrantGroupOnTable() throws Exception {
-    DDLWork work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE);
-    ShowGrantDesc grantDesc = work.getShowGrantDesc();
+    DDLWork2 work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE);
+    ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc();
     Assert.assertNotNull("Show grant should not be null", grantDesc);
     Assert.assertEquals(PrincipalType.GROUP, grantDesc.getPrincipalDesc().getType());
     Assert.assertEquals(GROUP, grantDesc.getPrincipalDesc().getName());
@@ -482,7 +456,7 @@ public class TestHiveAuthorizationTaskFactory {
     }
   }
 
-  private DDLWork analyze(String command) throws Exception {
+  private DDLWork2 analyze(String command) throws Exception {
     return AuthorizationTestUtil.analyze(command, queryState, db);
   }
 
diff --git a/ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out b/ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out
index f328beb..d72cb25 100644
--- a/ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out
+++ b/ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out
@@ -4,4 +4,4 @@ POSTHOOK: query: set role ADMIN
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: create role default
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE]
diff --git a/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out b/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out
index 8bc747e..f1b469f 100644
--- a/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out
+++ b/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out
@@ -55,4 +55,4 @@ public
 testrole
 PREHOOK: query: create role TESTRoLE
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists.
diff --git a/ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out
index 981c8cd..9faf5bc 100644
--- a/ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out
+++ b/ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: create role r1
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out b/ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out
index 8383f52..e5474ac 100644
--- a/ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out
+++ b/ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out
@@ -4,4 +4,4 @@ POSTHOOK: query: set role admin
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: drop role admin
 PREHOOK: type: DROPROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error dropping role: public,admin roles can't be dropped.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error dropping role: public,admin roles can't be dropped.
diff --git a/ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out
index 637167b..e03796d 100644
--- a/ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out
+++ b/ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out
@@ -22,4 +22,4 @@ POSTHOOK: type: SHOW_ROLES
 public
 PREHOOK: query: drop role r1
 PREHOOK: type: DROPROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_fail_1.q.out b/ql/src/test/results/clientnegative/authorization_fail_1.q.out
index fc52cb3..1fba5cf 100644
--- a/ql/src/test/results/clientnegative/authorization_fail_1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_fail_1.q.out
@@ -15,4 +15,4 @@ POSTHOOK: Output: default@authorization_fail_1
 PREHOOK: query: grant Create on table authorization_fail_1 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_fail_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException CREATE is already granted on table [default,authorization_fail_1] by hive_test_user)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException CREATE is already granted on table [default,authorization_fail_1] by hive_test_user)
diff --git a/ql/src/test/results/clientnegative/authorization_fail_8.q.out b/ql/src/test/results/clientnegative/authorization_fail_8.q.out
index e1ed1ad..adfe2d2 100644
--- a/ql/src/test/results/clientnegative/authorization_fail_8.q.out
+++ b/ql/src/test/results/clientnegative/authorization_fail_8.q.out
@@ -43,4 +43,4 @@ default	authorization_fail			user2	USER	SELECT	false	-1	user1
 PREHOOK: query: GRANT SELECT ON authorization_fail TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_fail
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.authorization_fail]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.authorization_fail]]
diff --git a/ql/src/test/results/clientnegative/authorization_grant_group.q.out b/ql/src/test/results/clientnegative/authorization_grant_group.q.out
index 712a5ab..7707458 100644
--- a/ql/src/test/results/clientnegative/authorization_grant_group.q.out
+++ b/ql/src/test/results/clientnegative/authorization_grant_group.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@table_gg
 PREHOOK: query: GRANT INSERT ON table_gg TO group g1
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_gg
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Invalid principal type in principal Principal [name=g1, type=GROUP]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid principal type in principal Principal [name=g1, type=GROUP]
diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
index d613fe1..139517d 100644
--- a/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
+++ b/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
@@ -15,4 +15,4 @@ POSTHOOK: Output: default@table_priv_allf
 PREHOOK: query: GRANT ALL ON table_priv_allf TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_allf
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf]]
diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
index 795dc83..3e50f11 100644
--- a/ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
+++ b/ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
@@ -22,4 +22,4 @@ default	tauth_gdup			user1	USER	UPDATE	true	-1	user1
 PREHOOK: query: GRANT INSERT ON tauth_gdup TO USER user1
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@tauth_gdup
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException INSERT is already granted on table [default,tauth_gdup] by user1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException INSERT is already granted on table [default,tauth_gdup] by user1
diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
index 4dbb9e3..f4d362b 100644
--- a/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@table_priv_gfail1
 PREHOOK: query: GRANT INSERT ON table_priv_gfail1 TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_gfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
index 0656ae5..d9e292f 100644
--- a/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
+++ b/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
@@ -15,4 +15,4 @@ POSTHOOK: Output: default@table_priv_gfail1
 PREHOOK: query: GRANT INSERT ON table_priv_gfail1 TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_gfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
diff --git a/ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out b/ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
index d390d90..5c319c6 100644
--- a/ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@authorization_invalid_v2
 PREHOOK: query: grant lock on table authorization_invalid_v2 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_invalid_v2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unsupported privilege type LOCK
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unsupported privilege type LOCK
diff --git a/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out b/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
index 330a06c..ebfa03c 100644
--- a/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
+++ b/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
@@ -61,4 +61,4 @@ POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant all on table tpriv_current_role to user user5
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@tpriv_current_role
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]]
diff --git a/ql/src/test/results/clientnegative/authorization_public_create.q.out b/ql/src/test/results/clientnegative/authorization_public_create.q.out
index 7defa82..5aaf75d 100644
--- a/ql/src/test/results/clientnegative/authorization_public_create.q.out
+++ b/ql/src/test/results/clientnegative/authorization_public_create.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: create role public
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.)
diff --git a/ql/src/test/results/clientnegative/authorization_public_drop.q.out b/ql/src/test/results/clientnegative/authorization_public_drop.q.out
index 6aaa1ff..003a5f1 100644
--- a/ql/src/test/results/clientnegative/authorization_public_drop.q.out
+++ b/ql/src/test/results/clientnegative/authorization_public_drop.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: drop role public
 PREHOOK: type: DROPROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.)
diff --git a/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out b/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
index 61fa52a..bca1b92 100644
--- a/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
@@ -15,5 +15,5 @@ POSTHOOK: Output: default@table_priv_rfail1
 PREHOOK: query: REVOKE INSERT ON TABLE table_priv_rfail1 FROM USER user2
 PREHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: Output: default@table_priv_rfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfail1] granted by user3
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfail1] granted by user3
 
diff --git a/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out b/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
index 1b41d61..ee2a2a5 100644
--- a/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
@@ -27,5 +27,5 @@ POSTHOOK: Output: default@table_priv_rfai2
 PREHOOK: query: REVOKE INSERT ON TABLE table_priv_rfai2 FROM USER user2
 PREHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: Output: default@table_priv_rfai2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfai2] granted by user3
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfai2] granted by user3
 
diff --git a/ql/src/test/results/clientnegative/authorization_role_case.q.out b/ql/src/test/results/clientnegative/authorization_role_case.q.out
index 4908e5d..7b07d1b 100644
--- a/ql/src/test/results/clientnegative/authorization_role_case.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_case.q.out
@@ -31,4 +31,4 @@ POSTHOOK: Output: default@t1
 PREHOOK: query: grant UPDATE  on table t1 to role mixcaserole2
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@t1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist)
diff --git a/ql/src/test/results/clientnegative/authorization_role_cycles1.q.out b/ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
index 9303c7e..2085067 100644
--- a/ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
@@ -16,4 +16,4 @@ POSTHOOK: query: grant role role1 to role role2
 POSTHOOK: type: GRANT_ROLE
 PREHOOK: query: grant role role2 to role role1
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed)
diff --git a/ql/src/test/results/clientnegative/authorization_role_cycles2.q.out b/ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
index df27bee..cf6b391 100644
--- a/ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
@@ -40,4 +40,4 @@ POSTHOOK: query: grant role role5 to role role4
 POSTHOOK: type: GRANT_ROLE
 PREHOOK: query: grant role role2 to role role4
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. (no cycles allowed)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. (no cycles allowed)
diff --git a/ql/src/test/results/clientnegative/authorization_role_grant.q.out b/ql/src/test/results/clientnegative/authorization_role_grant.q.out
index cb79651..951b050 100644
--- a/ql/src/test/results/clientnegative/authorization_role_grant.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_grant.q.out
@@ -31,4 +31,4 @@ POSTHOOK: query: set role role_noadmin
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant  src_role_wadmin to user user3
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_role_grant2.q.out b/ql/src/test/results/clientnegative/authorization_role_grant2.q.out
index ade6752..7beef28 100644
--- a/ql/src/test/results/clientnegative/authorization_role_grant2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_grant2.q.out
@@ -48,4 +48,4 @@ POSTHOOK: query: set role src_role_wadmin
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant  src_role_wadmin to user user3
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out b/ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
index 481842c..34675bf 100644
--- a/ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role role1
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: grant role1 to role nosuchrole
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
diff --git a/ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out b/ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
index 144b787..3a0760d 100644
--- a/ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role accounting
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: show role grant role accounting
 PREHOOK: type: SHOW_ROLE_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out b/ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
index a0c6845..0da86c9 100644
--- a/ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
+++ b/ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
@@ -19,4 +19,4 @@ POSTHOOK: type: SHOW_ROLE_GRANT
 public	false	-1	
 PREHOOK: query: show role grant user ruser2
 PREHOOK: type: SHOW_ROLE_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out b/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
index ee056be..56d6b7e 100644
--- a/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: set role nosuchroleexists
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. hive_test_user doesn't belong to role nosuchroleexists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. hive_test_user doesn't belong to role nosuchroleexists
diff --git a/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out b/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
index 539ce39..0396a1f 100644
--- a/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
@@ -20,4 +20,4 @@ POSTHOOK: query: set role public
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: set role nosuchroleexists
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. user2 doesn't belong to role nosuchroleexists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. user2 doesn't belong to role nosuchroleexists
diff --git a/ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out b/ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
index 736e693..d8dad36 100644
--- a/ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role role1
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: show grant role role1
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
index 9adbd09..74e93d9 100644
--- a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show grant
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error showing privileges: User : user1 has to specify a user name or role in the show grant. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 has to specify a user name or role in the show grant. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
index dea2264..b3aee24 100644
--- a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show grant user user2
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
index 47d5c7a..cadbe11 100644
--- a/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
 PREHOOK: query: show grant user user2 on table t1
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
index 8be27b5..ee0fef1 100644
--- a/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show principals role1
 PREHOOK: type: SHOW_ROLE_PRINCIPALS
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_test_user is not allowed get principals in a role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed get principals in a role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
index adc2788..7cd4bb1 100644
--- a/ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
+++ b/ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show roles
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_test_user is not allowed to list roles. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed to list roles. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out b/ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
index 6eef774..506088d 100644
--- a/ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
+++ b/ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@t1
 PREHOOK: query: grant ALL on t1 to role nosuchrole
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@t1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
diff --git a/ql/src/test/results/clientnegative/authorize_grant_public.q.out b/ql/src/test/results/clientnegative/authorize_grant_public.q.out
index 6872b78..a25fa01 100644
--- a/ql/src/test/results/clientnegative/authorize_grant_public.q.out
+++ b/ql/src/test/results/clientnegative/authorize_grant_public.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: grant role public to user hive_test_user
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.)
diff --git a/ql/src/test/results/clientnegative/authorize_revoke_public.q.out b/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
index ede7487..af3fbcb 100644
--- a/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
+++ b/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: revoke role public from user hive_test_user
 PREHOOK: type: REVOKE_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index 235f8c9..1ad26b5 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -454,6 +454,7 @@ POSTHOOK: query: explain analyze grant select on table src_autho_test_n4 to user
 POSTHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n4
 Stage-0
+  Grant{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n4"},"Privileges:":[{"Privilege":{"privilege:":{"type:":"Select"}}}],"grant option:":"false"}
 
 PREHOOK: query: grant select on table src_autho_test_n4 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
@@ -473,6 +474,7 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show grant desc{"privilege subject":{"object:":"default.src_autho_test_n4"},"principal desc:":{"Principal":{"name:":"hive_test_user"}}}
 
 PREHOOK: query: show grant user hive_test_user on table src_autho_test_n4(key)
 PREHOOK: type: SHOW_GRANT
@@ -486,6 +488,7 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show grant desc{"privilege subject":{"object:":"default.src_autho_test_n4"},"principal desc:":{"Principal":{"name:":"hive_test_user"}}}
 
 PREHOOK: query: select key from src_autho_test_n4 order by key limit 20
 PREHOOK: type: QUERY
@@ -528,6 +531,7 @@ POSTHOOK: query: explain analyze revoke select on table src_autho_test_n4 from u
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n4
 Stage-0
+  Revoke{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n4"},"Privileges:":[{"Privilege":{"privilege:":{"type:":"Select"}}}]}
 
 PREHOOK: query: grant select(key) on table src_autho_test_n4 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
@@ -542,6 +546,7 @@ POSTHOOK: query: explain analyze grant select(key) on table src_autho_test_n4 to
 POSTHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n4
 Stage-0
+  Grant{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n4"},"Privileges:":[{"Privilege":{"columns:":["key"],"privilege:":{"type:":"Select"}}}],"grant option:":"false"}
 
 PREHOOK: query: revoke select(key) on table src_autho_test_n4 from user hive_test_user
 PREHOOK: type: REVOKE_PRIVILEGE
@@ -556,6 +561,7 @@ POSTHOOK: query: explain analyze revoke select(key) on table src_autho_test_n4 f
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n4
 Stage-0
+  Revoke{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n4"},"Privileges:":[{"Privilege":{"columns:":["key"],"privilege:":{"type:":"Select"}}}]}
 
 PREHOOK: query: create role sRc_roLE
 PREHOOK: type: CREATEROLE
@@ -568,6 +574,7 @@ POSTHOOK: query: explain analyze
 create role sRc_roLE
 POSTHOOK: type: CREATEROLE
 Stage-0
+  Create Role{"name:":"sRc_roLE"}
 
 PREHOOK: query: create role sRc_roLE
 PREHOOK: type: CREATEROLE
@@ -584,6 +591,7 @@ POSTHOOK: query: explain analyze
 grant role sRc_roLE to user hive_test_user
 POSTHOOK: type: GRANT_ROLE
 Stage-0
+  Grant roles{"principals:":[{"Principal":{"name:":"hive_test_user"}}],"roles:":["sRc_roLE"]}
 
 PREHOOK: query: grant role sRc_roLE to user hive_test_user
 PREHOOK: type: GRANT_ROLE
@@ -601,6 +609,7 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show Role Grant{"name:":"hive_test_user"}
 
 PREHOOK: query: drop role sRc_roLE
 PREHOOK: type: DROPROLE
@@ -611,6 +620,7 @@ PREHOOK: type: DROPROLE
 POSTHOOK: query: explain analyze drop role sRc_roLE
 POSTHOOK: type: DROPROLE
 Stage-0
+  Drop Role{"name:":"sRc_roLE"}
 
 PREHOOK: query: drop role sRc_roLE
 PREHOOK: type: DROPROLE
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 40d1c32..c07c6a3 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -378,6 +378,7 @@ POSTHOOK: query: explain grant select on table src_autho_test_n3 to user hive_te
 POSTHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n3
 Stage-0
+  Grant{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n3"},"Privileges:":[{"Privilege":{"privilege:":{"type:":"Select"}}}],"grant option:":"false"}
 
 PREHOOK: query: grant select on table src_autho_test_n3 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
@@ -393,6 +394,7 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show grant desc{"privilege subject":{"object:":"default.src_autho_test_n3"},"principal desc:":{"Principal":{"name:":"hive_test_user"}}}
 
 PREHOOK: query: explain show grant user hive_test_user on table src_autho_test_n3(key)
 PREHOOK: type: SHOW_GRANT
@@ -402,6 +404,7 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show grant desc{"privilege subject":{"object:":"default.src_autho_test_n3"},"principal desc:":{"Principal":{"name:":"hive_test_user"}}}
 
 PREHOOK: query: select key from src_autho_test_n3 order by key limit 20
 PREHOOK: type: QUERY
@@ -438,6 +441,7 @@ POSTHOOK: query: explain revoke select on table src_autho_test_n3 from user hive
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n3
 Stage-0
+  Revoke{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n3"},"Privileges:":[{"Privilege":{"privilege:":{"type:":"Select"}}}]}
 
 PREHOOK: query: explain grant select(key) on table src_autho_test_n3 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
@@ -446,6 +450,7 @@ POSTHOOK: query: explain grant select(key) on table src_autho_test_n3 to user hi
 POSTHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n3
 Stage-0
+  Grant{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n3"},"Privileges:":[{"Privilege":{"columns:":["key"],"privilege:":{"type:":"Select"}}}],"grant option:":"false"}
 
 PREHOOK: query: explain revoke select(key) on table src_autho_test_n3 from user hive_test_user
 PREHOOK: type: REVOKE_PRIVILEGE
@@ -454,6 +459,7 @@ POSTHOOK: query: explain revoke select(key) on table src_autho_test_n3 from user
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@src_autho_test_n3
 Stage-0
+  Revoke{"Principals:":[{"Principal":{"name:":"hive_test_user"}}],"privilege subject":{"object:":"default.src_autho_test_n3"},"Privileges:":[{"Privilege":{"columns:":["key"],"privilege:":{"type:":"Select"}}}]}
 
 PREHOOK: query: explain 
 create role sRc_roLE
@@ -462,6 +468,7 @@ POSTHOOK: query: explain
 create role sRc_roLE
 POSTHOOK: type: CREATEROLE
 Stage-0
+  Create Role{"name:":"sRc_roLE"}
 
 PREHOOK: query: create role sRc_roLE
 PREHOOK: type: CREATEROLE
@@ -474,6 +481,7 @@ POSTHOOK: query: explain
 grant role sRc_roLE to user hive_test_user
 POSTHOOK: type: GRANT_ROLE
 Stage-0
+  Grant roles{"principals:":[{"Principal":{"name:":"hive_test_user"}}],"roles:":["sRc_roLE"]}
 
 PREHOOK: query: grant role sRc_roLE to user hive_test_user
 PREHOOK: type: GRANT_ROLE
@@ -487,12 +495,14 @@ Stage-1
   Fetch Operator
     limit:-1
     Stage-0
+      Show Role Grant{"name:":"hive_test_user"}
 
 PREHOOK: query: explain drop role sRc_roLE
 PREHOOK: type: DROPROLE
 POSTHOOK: query: explain drop role sRc_roLE
 POSTHOOK: type: DROPROLE
 Stage-0
+  Drop Role{"name:":"sRc_roLE"}
 
 PREHOOK: query: drop role sRc_roLE
 PREHOOK: type: DROPROLE