Posted to commits@hive.apache.org by ha...@apache.org on 2014/02/08 16:33:30 UTC

svn commit: r1566029 - in /hive/trunk: cli/src/java/org/apache/hadoop/hive/cli/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/ ql/src/java/org/apache/h...

Author: hashutosh
Date: Sat Feb  8 15:33:29 2014
New Revision: 1566029

URL: http://svn.apache.org/r1566029
Log:
HIVE-5930 : SQL std auth - implement set roles, show current roles (Ashutosh Chauhan via Thejas Nair)
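
The user-facing syntax this adds (exercised by the new authorization_set_show_current_role.q test further down) amounts to two statements. A minimal HiveQL sketch, with the role name r1 and user hive_test_user taken from that test; the test configures SQLStdHiveAuthorizerFactory as the authorization manager:

    create role r1;                        -- existing DDL
    grant role r1 to user hive_test_user;  -- existing DDL
    set role r1;                           -- new: makes r1 the session's only current role
    show current roles;                    -- new: lists the session's current roles
    set role NONE;                         -- new: resets to the user's default roles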

Added:
    hive/trunk/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q
    hive/trunk/ql/src/test/results/clientpositive/authorization_set_show_current_role.q.out
Modified:
    hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java

Modified: hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Sat Feb  8 15:33:29 2014
@@ -216,7 +216,7 @@ public class CliDriver {
       }
     } else { // local mode
       try {
-        CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf) conf);
+        CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
         ret = processLocalCmd(cmd, proc, ss);
       } catch (SQLException e) {
         console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(),
@@ -579,8 +579,9 @@ public class CliDriver {
     // We stack a custom Completor on top of our ArgumentCompletor
     // to reverse this.
     Completor completor = new Completor () {
+      @Override
       public int complete (String buffer, int offset, List completions) {
-        List<String> comp = (List<String>) completions;
+        List<String> comp = completions;
         int ret = ac.complete(buffer, offset, completions);
         // ConsoleReader will do the substitution if and only if there
         // is exactly one valid completion, so we ignore other cases.

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sat Feb  8 15:33:29 2014
@@ -657,7 +657,7 @@ public class DDLTask extends Task<DDLWor
         //only grantInfo is used
         HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
           AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
-          privObj.getTableviewname(),null,null), principal.getName(), 
+          privObj.getTableviewname(),null,null), principal.getName(),
           AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
         privList.add(thriftObjectPriv);
       }
@@ -970,6 +970,17 @@ public class DDLTask extends Task<DDLWor
       List<String> allRoles = authorizer.getAllRoles();
       writeListToFile(allRoles, roleDDLDesc.getResFile());
       break;
+    case SHOW_CURRENT_ROLE:
+      List<HiveRole> currentRoles = authorizer.getCurrentRoles();
+      List<String> roleNames = new ArrayList<String>(currentRoles.size());
+      for (HiveRole role : currentRoles) {
+        roleNames.add(role.getRoleName());
+      }
+      writeListToFile(roleNames, roleDDLDesc.getResFile());
+      break;
+    case SET_ROLE:
+      authorizer.setCurrentRole(roleDDLDesc.getName());
+      break;
     default:
       throw new HiveException("Unkown role operation "
           + operation.getOperationName());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sat Feb  8 15:33:29 2014
@@ -459,11 +459,33 @@ public class DDLSemanticAnalyzer extends
    case HiveParser.TOK_EXCHANGEPARTITION:
       analyzeExchangePartition(ast);
       break;
+   case HiveParser.TOK_SHOW_SET_ROLE:
+     analyzeSetShowRole(ast);
+     break;
     default:
       throw new SemanticException("Unsupported command.");
     }
   }
 
+  private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
+    switch (ast.getChildCount()) {
+      case 0:
+        ctx.setResFile(ctx.getLocalTmpPath());
+        rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
+        getInputs(), getOutputs(), ctx.getResFile()));
+        setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
+        break;
+      case 1:
+        rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
+        BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
+        getInputs(), getOutputs()));
+        break;
+      default:
+        throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. "
+        + ast.dump());
+    }
+  }
+
   private void analyzeGrantRevokeRole(boolean grant, ASTNode ast) throws SemanticException {
     Task<? extends Serializable> task;
     if(grant) {
@@ -940,7 +962,7 @@ public class DDLSemanticAnalyzer extends
         break;
       case HiveParser.TOK_CREATEINDEX_INDEXTBLNAME:
         ASTNode ch = (ASTNode) child.getChild(0);
-        indexTableName = getUnescapedName((ASTNode) ch);
+        indexTableName = getUnescapedName(ch);
         break;
       case HiveParser.TOK_DEFERRED_REBUILDINDEX:
         deferredRebuild = true;
@@ -2120,7 +2142,7 @@ public class DDLSemanticAnalyzer extends
       for (int i = 0; i < ast.getChildCount(); i++) {
         ASTNode child = (ASTNode) ast.getChild(i);
         if (child.getType() == HiveParser.TOK_TABTYPE) {
-          ASTNode tableTypeExpr = (ASTNode) child;
+          ASTNode tableTypeExpr = child;
           tableName =
             QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
           // get partition metadata if partition specified
@@ -2345,7 +2367,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
       HashMap<String, String> oldPartSpec) throws SemanticException {
-    Map<String, String> newPartSpec = extractPartitionSpecs((ASTNode) ast.getChild(0));
+    Map<String, String> newPartSpec = extractPartitionSpecs(ast.getChild(0));
     if (newPartSpec == null) {
       throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
     }
@@ -2514,7 +2536,7 @@ public class DDLSemanticAnalyzer extends
         }
         currentPart = getPartSpec(child);
         validatePartitionValues(currentPart); // validate reserved values
-        validatePartSpec(tab, currentPart, (ASTNode)child, conf, true);
+        validatePartSpec(tab, currentPart, child, conf, true);
         break;
       case HiveParser.TOK_PARTITIONLOCATION:
         // if location specified, set in partition

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sat Feb  8 15:33:29 2014
@@ -281,6 +281,7 @@ TOK_GRANT_ROLE;
 TOK_REVOKE_ROLE;
 TOK_SHOW_ROLE_GRANT;
 TOK_SHOW_ROLES;
+TOK_SHOW_SET_ROLE;
 TOK_SHOWINDEXES;
 TOK_SHOWDBLOCKS;
 TOK_INDEXCOMMENT;
@@ -671,6 +672,8 @@ ddlStatement
     | showRoles
     | grantRole
     | revokeRole
+    | setRole
+    | showCurrentRole
     ;
 
 ifExists
@@ -1376,6 +1379,20 @@ showRoles
     -> ^(TOK_SHOW_ROLES)
     ;
 
+showCurrentRole
+@init {pushMsg("show current role", state);}
+@after {popMsg(state);}
+    : KW_SHOW KW_CURRENT KW_ROLES
+    -> ^(TOK_SHOW_SET_ROLE)
+    ;
+
+setRole
+@init {pushMsg("set role", state);}
+@after {popMsg(state);}
+    : KW_SET KW_ROLE roleName=identifier
+    -> ^(TOK_SHOW_SET_ROLE $roleName)
+    ;
+
 showGrants
 @init {pushMsg("show grants", state);}
 @after {popMsg(state);}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Sat Feb  8 15:33:29 2014
@@ -96,6 +96,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_GRANT_ROLE, HiveOperation.GRANT_ROLE);
     commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE);
     commandType.put(HiveParser.TOK_SHOW_ROLES, HiveOperation.SHOW_ROLES);
+    commandType.put(HiveParser.TOK_SHOW_SET_ROLE, HiveOperation.SHOW_ROLES);
     commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT);
     commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE);
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
@@ -215,6 +216,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ALTERTABLE_SKEWED:
       case HiveParser.TOK_TRUNCATETABLE:
       case HiveParser.TOK_EXCHANGEPARTITION:
+      case HiveParser.TOK_SHOW_SET_ROLE:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         HiveOperation commandType = null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java Sat Feb  8 15:33:29 2014
@@ -56,4 +56,10 @@ public interface HiveAuthorizationTaskFa
 
   public Task<? extends Serializable> createRevokeTask(ASTNode node, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) throws SemanticException;
+
+  public Task<? extends Serializable> createSetRoleTask(String roleName,
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException;
+
+  public Task<? extends Serializable> createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
+      HashSet<WriteEntity> outputs, Path resFile) throws SemanticException;
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Sat Feb  8 15:33:29 2014
@@ -370,4 +370,21 @@ public class HiveAuthorizationTaskFactor
   private String toMessage(ErrorMsg message, Object detail) {
     return detail == null ? message.getMsg() : message.getMsg(detail.toString());
   }
+
+  @Override
+  public Task<? extends Serializable> createSetRoleTask(String roleName,
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs)
+      throws SemanticException {
+    return TaskFactory.get(new DDLWork(inputs, outputs, new RoleDDLDesc(roleName,
+      RoleDDLDesc.RoleOperation.SET_ROLE)), conf);
+  }
+
+  @Override
+  public Task<? extends Serializable> createShowCurrentRoleTask(
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, Path resFile)
+      throws SemanticException {
+    RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
+    ddlDesc.setResFile(resFile.toString());
+    return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc), conf);
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java Sat Feb  8 15:33:29 2014
@@ -28,15 +28,15 @@ public class RoleDDLDesc extends DDLDesc
   private static final long serialVersionUID = 1L;
 
   private String name;
-  
+
   private PrincipalType principalType;
-  
+
   private boolean group;
 
   private RoleOperation operation;
-  
+
   private String resFile;
-  
+
   private String roleOwnerName;
 
   /**
@@ -60,7 +60,8 @@ public class RoleDDLDesc extends DDLDesc
   }
 
   public static enum RoleOperation {
-    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"), SHOW_ROLES("show_roles");
+    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"),
+    SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role");
     private String operationName;
 
     private RoleOperation() {
@@ -74,11 +75,12 @@ public class RoleDDLDesc extends DDLDesc
       return operationName;
     }
 
+    @Override
     public String toString () {
       return this.operationName;
     }
   }
-  
+
   public RoleDDLDesc(){
   }
 
@@ -102,7 +104,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setName(String roleName) {
     this.name = roleName;
   }
-  
+
   @Explain(displayName = "role operation")
   public RoleOperation getOperation() {
     return operation;
@@ -111,7 +113,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setOperation(RoleOperation operation) {
     this.operation = operation;
   }
-  
+
   public PrincipalType getPrincipalType() {
     return principalType;
   }
@@ -127,7 +129,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setGroup(boolean group) {
     this.group = group;
   }
-  
+
   public String getResFile() {
     return resFile;
   }
@@ -135,7 +137,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setResFile(String resFile) {
     this.resFile = resFile;
   }
-  
+
   public String getRoleOwnerName() {
     return roleOwnerName;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Sat Feb  8 15:33:29 2014
@@ -45,13 +45,13 @@ public final class CommandProcessorFacto
 
   public static CommandProcessor get(String cmd)
       throws SQLException {
-    return get(cmd, null);
+    return get(new String[]{cmd}, null);
   }
 
-  public static CommandProcessor getForHiveCommand(String cmd, HiveConf conf)
+  public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
       throws SQLException {
     HiveCommand hiveCommand = HiveCommand.find(cmd);
-    if (hiveCommand == null || isBlank(cmd)) {
+    if (hiveCommand == null || isBlank(cmd[0])) {
       return null;
     }
     if (conf == null) {
@@ -61,8 +61,8 @@ public final class CommandProcessorFacto
     for (String availableCommand : conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST).split(",")) {
       availableCommands.add(availableCommand.toLowerCase().trim());
     }
-    if (!availableCommands.contains(cmd.trim().toLowerCase())) {
-      throw new SQLException("Insufficient privileges to execute " + cmd, "42000");
+    if (!availableCommands.contains(cmd[0].trim().toLowerCase())) {
+      throw new SQLException("Insufficient privileges to execute " + cmd[0], "42000");
     }
     switch (hiveCommand) {
       case SET:
@@ -83,13 +83,13 @@ public final class CommandProcessorFacto
     }
   }
 
-  public static CommandProcessor get(String cmd, HiveConf conf)
+  public static CommandProcessor get(String[] cmd, HiveConf conf)
       throws SQLException {
     CommandProcessor result = getForHiveCommand(cmd, conf);
     if (result != null) {
       return result;
     }
-    if (isBlank(cmd)) {
+    if (isBlank(cmd[0])) {
       return null;
     } else {
       if (conf == null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Sat Feb  8 15:33:29 2014
@@ -38,11 +38,18 @@ public enum HiveCommand {
       COMMANDS.add(command.name());
     }
   }
-  public static HiveCommand find(String command) {
-    if (command != null) {
-      command = command.trim().toUpperCase();
-      if (COMMANDS.contains(command)) {
-        return HiveCommand.valueOf(command);
+  public static HiveCommand find(String[] command) {
+    if (null == command){
+      return null;
+    }
+    String cmd = command[0];
+    if (cmd != null) {
+      cmd = cmd.trim().toUpperCase();
+      if (command.length > 1 && "role".equalsIgnoreCase(command[1])) {
+        // special handling for set role r1 statement
+        return null;
+      } else if (COMMANDS.contains(cmd)) {
+        return HiveCommand.valueOf(cmd);
       }
     }
     return null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java Sat Feb  8 15:33:29 2014
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.securi
 
 import java.util.List;
 
+import org.apache.hadoop.hive.metastore.api.Role;
+
 /**
  * Interface that is invoked by access control commands, including grant/revoke role/privileges,
  * create/drop roles, and commands to read the state of authorization rules.
@@ -57,4 +59,7 @@ public interface HiveAccessController {
   List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
       throws HiveAuthorizationPluginException;
 
+  void setCurrentRole(String roleName) throws HiveAuthorizationPluginException;
+
+  List<HiveRole> getCurrentRoles() throws HiveAuthorizationPluginException;
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java Sat Feb  8 15:33:29 2014
@@ -21,6 +21,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 
 /**
@@ -150,7 +151,9 @@ public interface HiveAuthorizer {
   List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
       throws HiveAuthorizationPluginException;
 
+  void setCurrentRole(String roleName) throws HiveAuthorizationPluginException;
 
+  List<HiveRole> getCurrentRoles() throws HiveAuthorizationPluginException;
   //other functions to be added -
   //showUsersInRole(rolename)
   //isSuperuser(username)

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java Sat Feb  8 15:33:29 2014
@@ -39,5 +39,5 @@ public interface HiveAuthorizerFactory {
    * @return new instance of HiveAuthorizer
    */
   HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, String hiveCurrentUser);
+      HiveConf conf, String hiveCurrentUser) throws HiveAuthorizationPluginException;
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java Sat Feb  8 15:33:29 2014
@@ -21,6 +21,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.metastore.api.Role;
 
 /**
  * Convenience implementation of HiveAuthorizer.
@@ -104,6 +105,16 @@ public class HiveAuthorizerImpl implemen
     return VERSION.V1;
   }
 
+  @Override
+  public void setCurrentRole(String roleName) throws HiveAuthorizationPluginException {
+    accessController.setCurrentRole(roleName);
+  }
+
+  @Override
+  public List<HiveRole> getCurrentRoles() throws HiveAuthorizationPluginException {
+    return accessController.getCurrentRoles();
+  }
+
 
  // other access control functions
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Sat Feb  8 15:33:29 2014
@@ -26,11 +26,12 @@ import java.util.Set;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
@@ -46,29 +47,55 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole;
 
-
 /**
  * Implements functionality of access control statements for sql standard based authorization
  */
 @Private
 public class SQLStdHiveAccessController implements HiveAccessController {
 
-  private HiveMetastoreClientFactory metastoreClientFactory;
+  private final HiveMetastoreClientFactory metastoreClientFactory;
   private static final String [] SUPPORTED_PRIVS = {"INSERT", "UPDATE", "DELETE", "SELECT", "ALL"};
   private static final Set<String> SUPPORTED_PRIVS_SET
     = new HashSet<String>(Arrays.asList(SUPPORTED_PRIVS));
-
+  private final List<HiveRole> currentRoles;
+  private final String currentUserName;
+  private HiveRole adminRole;
 
   SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, String hiveCurrentUser){
-    this.metastoreClientFactory = metastoreClientFactory;
+      HiveConf conf, String hiveCurrentUser) throws HiveAuthorizationPluginException {
+    this.currentUserName = hiveCurrentUser;
+    try {
+      this.metastoreClientFactory = metastoreClientFactory;
+      this.currentRoles = getRolesFromMS();
+    } catch (HiveAuthorizationPluginException e) {
+      throw e;
+    }
   }
 
+  private List<HiveRole> getRolesFromMS() throws HiveAuthorizationPluginException {
+    List<Role> roles;
+    try {
+      roles = metastoreClientFactory.getHiveMetastoreClient().
+        list_roles(currentUserName, PrincipalType.USER);
+      List<HiveRole> currentRoles = new ArrayList<HiveRole>(roles.size());
+      for (Role role : roles) {
+        if (!HiveMetaStore.ADMIN.equalsIgnoreCase(role.getRoleName())) {
+          currentRoles.add(new HiveRole(role));
+        } else {
+          this.adminRole = new HiveRole(role);
+        }
+      }
+      return currentRoles;
+    } catch (Exception e) {
+        throw new HiveAuthorizationPluginException("Failed to retrieve roles for "+
+        currentUserName, e);
+    }
+  }
 
   @Override
   public void grantPrivileges(List<HivePrincipal> hivePrincipals,
-      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
-      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+    List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+    HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
 
     PrivilegeBag privBag =
         getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal,
@@ -91,8 +118,8 @@ public class SQLStdHiveAccessController 
    * @throws HiveAuthorizationPluginException
    */
   private PrivilegeBag getThriftPrivilegesBag(List<HivePrincipal> hivePrincipals,
-      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
-      HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
+    List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+    HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthorizationPluginException {
     HiveObjectRef privObj = getThriftHiveObjectRef(hivePrivObject);
     PrivilegeBag privBag = new PrivilegeBag();
     for(HivePrivilege privilege : hivePrivileges){
@@ -158,8 +185,8 @@ public class SQLStdHiveAccessController 
       throws HiveAuthorizationPluginException {
     try {
       String grantorName = adminGrantor == null ? null : adminGrantor.getName();
-      metastoreClientFactory.getHiveMetastoreClient()
-        .create_role(new Role(roleName, 0, grantorName));
+      metastoreClientFactory.getHiveMetastoreClient().create_role(
+        new Role(roleName, 0, grantorName));
     } catch (Exception e) {
       throw new HiveAuthorizationPluginException("Error create role", e);
     }
@@ -178,7 +205,7 @@ public class SQLStdHiveAccessController 
   public List<HiveRole> getRoles(HivePrincipal hivePrincipal) throws HiveAuthorizationPluginException {
     try {
       List<Role> roles = metastoreClientFactory.getHiveMetastoreClient().list_roles(
-          hivePrincipal.getName(), AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()));
+        hivePrincipal.getName(), AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()));
       List<HiveRole> roleNames = new ArrayList<HiveRole>(roles.size());
       for (Role role : roles){
         ;
@@ -197,14 +224,11 @@ public class SQLStdHiveAccessController 
     for(HivePrincipal hivePrincipal : hivePrincipals){
       for(String roleName : roleNames){
         try {
-          IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
-          mClient.grant_role(roleName,
-              hivePrincipal.getName(),
-              AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()),
-              grantorPrinc.getName(),
-              AuthorizationUtils.getThriftPrincipalType(grantorPrinc.getType()),
-              grantOption
-              );
+          metastoreClientFactory.getHiveMetastoreClient().grant_role(
+            roleName, hivePrincipal.getName(),
+            AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()),
+            grantorPrinc.getName(),
+            AuthorizationUtils.getThriftPrincipalType(grantorPrinc.getType()),grantOption);
         } catch (MetaException e) {
           throw new HiveAuthorizationPluginException(e.getMessage(), e);
         } catch (Exception e) {
@@ -227,11 +251,8 @@ public class SQLStdHiveAccessController 
     for(HivePrincipal hivePrincipal : hivePrincipals){
       for(String roleName : roleNames){
         try {
-          IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
-          mClient.revoke_role(roleName,
-              hivePrincipal.getName(),
-              AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType())
-              );
+          metastoreClientFactory.getHiveMetastoreClient().revoke_role(roleName,
+            hivePrincipal.getName(), AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()));
         }  catch (Exception e) {
           String msg = "Error revoking roles for " + hivePrincipal.getName() +  " to role " + roleName
               + hivePrincipal.getName();
@@ -257,11 +278,10 @@ public class SQLStdHiveAccessController 
     try {
 
       List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
-      IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
 
       //get metastore/thrift privilege object using metastore api
       List<HiveObjectPrivilege> msObjPrivs
-        = mClient.list_privileges(principal.getName(),
+        = metastoreClientFactory.getHiveMetastoreClient().list_privileges(principal.getName(),
             AuthorizationUtils.getThriftPrincipalType(principal.getType()),
             getThriftHiveObjectRef(privObj));
 
@@ -303,7 +323,6 @@ public class SQLStdHiveAccessController 
 
   }
 
-
   private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
       throws HiveAuthorizationPluginException {
     switch(objectType){
@@ -320,4 +339,35 @@ public class SQLStdHiveAccessController 
     }
   }
 
+  @Override
+  public void setCurrentRole(String roleName) throws HiveAuthorizationPluginException {
+    if ("NONE".equalsIgnoreCase(roleName)) {
+      // for set role NONE, reset roles to default roles.
+      currentRoles.clear();
+      currentRoles.addAll(getRolesFromMS());
+      return;
+    }
+    for (HiveRole role : getRolesFromMS()) {
+      // set to one of the roles user belongs to.
+      if (role.getRoleName().equalsIgnoreCase(roleName)) {
+        currentRoles.clear();
+        currentRoles.add(role);
+        return;
+      }
+    }
+    // set to ADMIN role, if user belongs there.
+    if (HiveMetaStore.ADMIN.equalsIgnoreCase(roleName) && null != this.adminRole) {
+      currentRoles.clear();
+      currentRoles.add(adminRole);
+      return;
+    }
+    // If we are here it means, user is requesting a role he doesn't belong to.
+    throw new HiveAuthorizationPluginException(currentUserName +" doesn't belong to role "
+      +roleName);
+  }
+
+  @Override
+  public List<HiveRole> getCurrentRoles() throws HiveAuthorizationPluginException {
+    return currentRoles;
+  }
 }

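A short hedged HiveQL sketch of the setCurrentRole semantics implemented above ("admin" is the metastore ADMIN role; the error text follows the exception message in the hunk, and exact client-side rendering may differ):

    set role admin;   -- succeeds only if the user was granted the metastore admin role;
                      -- otherwise SET ROLE fails with:
                      --   <user> doesn't belong to role admin
    set role NONE;    -- always allowed: re-reads the user's default roles from the metastore
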
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java Sat Feb  8 15:33:29 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.securi
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl;
@@ -28,7 +29,7 @@ import org.apache.hadoop.hive.ql.securit
 public class SQLStdHiveAuthorizerFactory implements HiveAuthorizerFactory{
   @Override
   public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, String hiveCurrentUser) {
+      HiveConf conf, String hiveCurrentUser) throws HiveAuthorizationPluginException {
     return new HiveAuthorizerImpl(
         new SQLStdHiveAccessController(metastoreClientFactory, conf, hiveCurrentUser),
         new SQLStdHiveAuthorizationValidator()

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/processors/TestCommandProcessorFactory.java Sat Feb  8 15:33:29 2014
@@ -39,25 +39,26 @@ public class TestCommandProcessorFactory
   @Test
   public void testInvalidCommands() throws Exception {
     Assert.assertNull("Null should have returned null", CommandProcessorFactory.getForHiveCommand(null, conf));
-    Assert.assertNull("Blank should have returned null", CommandProcessorFactory.getForHiveCommand(" ", conf));
-    Assert.assertNull("SQL should have returned null", CommandProcessorFactory.getForHiveCommand("SELECT * FROM TABLE", conf));
+    Assert.assertNull("Blank should have returned null", CommandProcessorFactory.getForHiveCommand(new String[]{" "}, conf));
+    Assert.assertNull("set role should have returned null", CommandProcessorFactory.getForHiveCommand(new String[]{"set role"}, conf));
+    Assert.assertNull("SQL should have returned null", CommandProcessorFactory.getForHiveCommand(new String[]{"SELECT * FROM TABLE"}, conf));
   }
   @Test
   public void testAvailableCommands() throws Exception {
     SessionState.start(conf);
     for (HiveCommand command : HiveCommand.values()) {
       String cmd = command.name();
-      Assert.assertNotNull("Cmd " + cmd + " not return null", CommandProcessorFactory.getForHiveCommand(cmd, conf));
+      Assert.assertNotNull("Cmd " + cmd + " not return null", CommandProcessorFactory.getForHiveCommand(new String[]{cmd}, conf));
     }
     for (HiveCommand command : HiveCommand.values()) {
       String cmd = command.name().toLowerCase();
-      Assert.assertNotNull("Cmd " + cmd + " not return null", CommandProcessorFactory.getForHiveCommand(cmd, conf));
+      Assert.assertNotNull("Cmd " + cmd + " not return null", CommandProcessorFactory.getForHiveCommand(new String[]{cmd}, conf));
     }
     conf.set(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.toString(), "");
     for (HiveCommand command : HiveCommand.values()) {
       String cmd = command.name();
       try {
-        CommandProcessorFactory.getForHiveCommand(cmd, conf);
+        CommandProcessorFactory.getForHiveCommand(new String[]{cmd}, conf);
         Assert.fail("Expected SQLException for " + cmd + " as available commands is empty");
       } catch (SQLException e) {
         Assert.assertEquals("Insufficient privileges to execute " + cmd, e.getMessage());

Added: hive/trunk/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q?rev=1566029&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q Sat Feb  8 15:33:29 2014
@@ -0,0 +1,17 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+
+show current roles;
+
+create role r1;
+grant role r1 to user hive_test_user;
+set role r1;
+show current roles;
+
+set role PUBLIC;
+show current roles;
+
+set role NONE;
+show current roles;
+
+drop role r1;
+

Added: hive/trunk/ql/src/test/results/clientpositive/authorization_set_show_current_role.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_set_show_current_role.q.out?rev=1566029&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_set_show_current_role.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_set_show_current_role.q.out Sat Feb  8 15:33:29 2014
@@ -0,0 +1,49 @@
+PREHOOK: query: show current roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show current roles
+POSTHOOK: type: SHOW_ROLES
+PUBLIC
+
+PREHOOK: query: create role r1
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: create role r1
+POSTHOOK: type: CREATEROLE
+PREHOOK: query: grant role r1 to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: grant role r1 to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+PREHOOK: query: set role r1
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role r1
+POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: show current roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show current roles
+POSTHOOK: type: SHOW_ROLES
+r1
+
+PREHOOK: query: set role PUBLIC
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role PUBLIC
+POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: show current roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show current roles
+POSTHOOK: type: SHOW_ROLES
+PUBLIC
+
+PREHOOK: query: set role NONE
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role NONE
+POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: show current roles
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: show current roles
+POSTHOOK: type: SHOW_ROLES
+r1
+PUBLIC
+
+PREHOOK: query: drop role r1
+PREHOOK: type: DROPROLE
+POSTHOOK: query: drop role r1
+POSTHOOK: type: DROPROLE

Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java?rev=1566029&r1=1566028&r2=1566029&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java Sat Feb  8 15:33:29 2014
@@ -51,10 +51,9 @@ public abstract class ExecuteStatementOp
       HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync)
       throws HiveSQLException {
     String[] tokens = statement.trim().split("\\s+");
-    String command = tokens[0].toLowerCase();
     CommandProcessor processor = null;
     try {
-      processor = CommandProcessorFactory.getForHiveCommand(tokens[0], parentSession.getHiveConf());
+      processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
     } catch (SQLException e) {
       throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
     }