Posted to commits@hive.apache.org by gu...@apache.org on 2014/02/14 11:53:39 UTC

svn commit: r1568236 [13/25] - in /hive/branches/tez: ./ bin/ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Feb 14 10:53:31 2014
@@ -25,6 +25,7 @@ import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -56,6 +57,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -92,6 +94,10 @@ import org.apache.hadoop.hive.ql.plan.De
 import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
@@ -99,7 +105,6 @@ import org.apache.hadoop.hive.ql.plan.Lo
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionSpec;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
@@ -125,10 +130,13 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
+import com.google.common.collect.Lists;
+
 /**
  * DDLSemanticAnalyzer.
  *
@@ -452,11 +460,33 @@ public class DDLSemanticAnalyzer extends
    case HiveParser.TOK_EXCHANGEPARTITION:
       analyzeExchangePartition(ast);
       break;
+   case HiveParser.TOK_SHOW_SET_ROLE:
+     analyzeSetShowRole(ast);
+     break;
     default:
       throw new SemanticException("Unsupported command.");
     }
   }
 
+  private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
+    switch (ast.getChildCount()) {
+      case 0:
+        ctx.setResFile(ctx.getLocalTmpPath());
+        rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
+        getInputs(), getOutputs(), ctx.getResFile()));
+        setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
+        break;
+      case 1:
+        rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
+        BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
+        getInputs(), getOutputs()));
+        break;
+      default:
+        throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. "
+        + ast.dump());
+    }
+  }
+
   private void analyzeGrantRevokeRole(boolean grant, ASTNode ast) throws SemanticException {
     Task<? extends Serializable> task;
     if(grant) {
@@ -515,7 +545,7 @@ public class DDLSemanticAnalyzer extends
         createShowRoleGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
     if(task != null) {
       rootTasks.add(task);
-      setFetchTask(createFetchTask(RoleDDLDesc.getSchema()));
+      setFetchTask(createFetchTask(RoleDDLDesc.getRoleDescSchema()));
     }
   }
 
@@ -525,7 +555,7 @@ public class DDLSemanticAnalyzer extends
     showRolesDesc.setResFile(ctx.getResFile().toString());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showRolesDesc), conf));
-    setFetchTask(createFetchTask(RoleDDLDesc.getSchema()));
+    setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
   }
 
   private void analyzeAlterDatabase(ASTNode ast) throws SemanticException {
@@ -700,8 +730,7 @@ public class DDLSemanticAnalyzer extends
       outputs.add(new WriteEntity(tab));
     }
 
-    DropTableDesc dropTblDesc = new DropTableDesc(
-        tableName, expectView, ifExists, true);
+    DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView, ifExists);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), conf));
   }
@@ -934,7 +963,7 @@ public class DDLSemanticAnalyzer extends
         break;
       case HiveParser.TOK_CREATEINDEX_INDEXTBLNAME:
         ASTNode ch = (ASTNode) child.getChild(0);
-        indexTableName = getUnescapedName((ASTNode) ch);
+        indexTableName = getUnescapedName(ch);
         break;
       case HiveParser.TOK_DEFERRED_REBUILDINDEX:
         deferredRebuild = true;
@@ -2114,7 +2143,7 @@ public class DDLSemanticAnalyzer extends
       for (int i = 0; i < ast.getChildCount(); i++) {
         ASTNode child = (ASTNode) ast.getChild(i);
         if (child.getType() == HiveParser.TOK_TABTYPE) {
-          ASTNode tableTypeExpr = (ASTNode) child;
+          ASTNode tableTypeExpr = child;
           tableName =
             QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
           // get partition metadata if partition specified
@@ -2339,7 +2368,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
       HashMap<String, String> oldPartSpec) throws SemanticException {
-    Map<String, String> newPartSpec = extractPartitionSpecs((ASTNode) ast.getChild(0));
+    Map<String, String> newPartSpec = extractPartitionSpecs(ast.getChild(0));
     if (newPartSpec == null) {
       throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
     }
@@ -2388,49 +2417,31 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView)
       throws SemanticException {
 
+    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
+        || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+    // If the drop has to fail on non-existent partitions, we cannot batch expressions.
+    // That is because we actually have to check each separate expression for existence.
+    // We could do a small optimization for the case where expr has all columns and all
+    // operators are equality, if we assume those would always match one partition (which
+    // may not be true with legacy, non-normalized column values). This is probably a
+    // popular case but that's kinda hacky. Let's not do it for now.
+    boolean canGroupExprs = ifExists;
+
     String tblName = getUnescapedName((ASTNode) ast.getChild(0));
-    // get table metadata
-    List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
     Table tab = getTable(tblName, true);
+    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs =
+        getFullPartitionSpecs(ast, tab, canGroupExprs);
+    if (partSpecs.isEmpty()) return; // nothing to do
+
     validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
     inputs.add(new ReadEntity(tab));
 
-    // Find out if all partition columns are strings. This is needed for JDO
-    boolean stringPartitionColumns = true;
-    List<FieldSchema> partCols = tab.getPartCols();
-
-    for (FieldSchema partCol : partCols) {
-      if (!partCol.getType().toLowerCase().equals("string")) {
-        stringPartitionColumns = false;
-        break;
-      }
-    }
-
-    // Only equality is supported for non-string partition columns
-    if (!stringPartitionColumns) {
-      for (PartitionSpec partSpec : partSpecs) {
-        if (partSpec.isNonEqualityOperator()) {
-          throw new SemanticException(
-              ErrorMsg.DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY.getMsg());
-        }
-      }
-    }
+    boolean ignoreProtection = ast.getFirstChildWithType(HiveParser.TOK_IGNOREPROTECTION) != null;
+    addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists, ignoreProtection);
 
-    boolean ignoreProtection = (ast.getFirstChildWithType(HiveParser.TOK_IGNOREPROTECTION) != null);
-    if (partSpecs != null) {
-      boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
-      // we want to signal an error if the partition doesn't exist and we're
-      // configured not to fail silently
-      boolean throwException =
-          !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
-      addTableDropPartsOutputs(tblName, partSpecs, throwException,
-                                stringPartitionColumns, ignoreProtection);
-    }
     DropTableDesc dropTblDesc =
-        new DropTableDesc(tblName, partSpecs, expectView, stringPartitionColumns, ignoreProtection);
-
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-        dropTblDesc), conf));
+        new DropTableDesc(tblName, partSpecs, expectView, ignoreProtection);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
   }
 
   private void analyzeAlterTablePartColType(ASTNode ast)
@@ -2528,7 +2539,7 @@ public class DDLSemanticAnalyzer extends
         }
         currentPart = getPartSpec(child);
         validatePartitionValues(currentPart); // validate reserved values
-        validatePartSpec(tab, currentPart, (ASTNode)child, conf, true);
+        validatePartSpec(tab, currentPart, child, conf, true);
         break;
       case HiveParser.TOK_PARTITIONLOCATION:
         // if location specified, set in partition
@@ -2739,34 +2750,84 @@ public class DDLSemanticAnalyzer extends
    * Get the partition specs from the tree. This stores the full specification
    * with the comparator operator into the output list.
    *
-   * @param ast
-   *          Tree to extract partitions from.
-   * @return A list of PartitionSpec objects which contain the mapping from
-   *         key to operator and value.
-   * @throws SemanticException
-   */
-  private List<PartitionSpec> getFullPartitionSpecs(CommonTree ast)
-      throws SemanticException {
-    List<PartitionSpec> partSpecList = new ArrayList<PartitionSpec>();
+   * @param ast Tree to extract partitions from.
+   * @param tab Table.
+   * @return Map of partitions by prefix length. Most of the time prefix length will
+   *               be the same for all partition specs, so we can just OR the expressions.
+   */
+  private Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(
+      CommonTree ast, Table tab, boolean canGroupExprs) throws SemanticException {
+    Map<String, String> colTypes = new HashMap<String, String>();
+    for (FieldSchema fs : tab.getPartitionKeys()) {
+      colTypes.put(fs.getName().toLowerCase(), fs.getType());
+    }
 
+    Map<Integer, List<ExprNodeGenericFuncDesc>> result =
+        new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
     for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
       Tree partSpecTree = ast.getChild(childIndex);
-      if (partSpecTree.getType() == HiveParser.TOK_PARTSPEC) {
-        PartitionSpec partSpec = new PartitionSpec();
+      if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
+      ExprNodeGenericFuncDesc expr = null;
+      HashSet<String> names = new HashSet<String>(partSpecTree.getChildCount());
+      for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
+        CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
+        assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
+        String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
+        String operator = partSpecSingleKey.getChild(1).getText();
+        String val = stripQuotes(partSpecSingleKey.getChild(2).getText());
+
+        String type = colTypes.get(key);
+        if (type == null) {
+          throw new SemanticException("Column " + key + " not found");
+        }
+        // Create the corresponding hive expression to filter on partition columns.
+        ExprNodeColumnDesc column = new ExprNodeColumnDesc(
+            TypeInfoFactory.getPrimitiveTypeInfo(type), key, null, true);
+        ExprNodeGenericFuncDesc op = makeBinaryPredicate(
+            operator, column, new ExprNodeConstantDesc(val));
+        // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
+        expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
+        names.add(key);
+      }
+      if (expr == null) continue;
+      // We got the expr for one full partition spec. Determine the prefix length.
+      int prefixLength = calculatePartPrefix(tab, names);
+      List<ExprNodeGenericFuncDesc> orExpr = result.get(prefixLength);
+      // We have to tell apart partitions resulting from spec with different prefix lengths.
+      // So, if we already have something for the same prefix length, we can OR the two.
+      // If we don't, create a new separate filter. In most cases there will only be one.
+      if (orExpr == null) {
+        result.put(prefixLength, Lists.newArrayList(expr));
+      } else if (canGroupExprs) {
+        orExpr.set(0, makeBinaryPredicate("or", expr, orExpr.get(0)));
+      } else {
+        orExpr.add(expr);
+      }
+    }
+    return result;
+  }
 
-        for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
-          CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
-          assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
-          String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
-          String operator = partSpecSingleKey.getChild(1).getText();
-          String val = partSpecSingleKey.getChild(2).getText();
-          partSpec.addPredicate(key, operator, val);
-        }
+  private static ExprNodeGenericFuncDesc makeBinaryPredicate(
+      String fn, ExprNodeDesc left, ExprNodeDesc right) {
+    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
+        FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(left, right));
+  }
 
-        partSpecList.add(partSpec);
-      }
+  /**
+   * Calculates the partition prefix length based on the drop spec.
+   * This is used to avoid deleting archived partitions at a lower level.
+   * For example, if the key columns are A and B and the drop spec is A=5, B=6, we shouldn't
+   * drop archived A=5/, because it can contain values of B other than 6.
+   * @param tbl Table
+   * @param partSpecKeys Keys present in drop partition spec.
+   */
+  private int calculatePartPrefix(Table tbl, HashSet<String> partSpecKeys) {
+    int partPrefixToDrop = 0;
+    for (FieldSchema fs : tbl.getPartCols()) {
+      if (!partSpecKeys.contains(fs.getName())) break;
+      ++partPrefixToDrop;
     }
-    return partSpecList;
+    return partPrefixToDrop;
   }
 
   /**
@@ -2857,42 +2918,42 @@ public class DDLSemanticAnalyzer extends
    * pre-execution hook. If the partition does not exist, throw an error if
    * throwIfNonExistent is true, otherwise ignore it.
    */
-  private void addTableDropPartsOutputs(String tblName, List<PartitionSpec> partSpecs,
-      boolean throwIfNonExistent, boolean stringPartitionColumns, boolean ignoreProtection)
-      throws SemanticException {
-    Table tab = getTable(tblName);
-
-    Iterator<PartitionSpec> i;
-    int index;
-    for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
-      PartitionSpec partSpec = i.next();
-      List<Partition> parts = null;
-      if (stringPartitionColumns) {
+  private void addTableDropPartsOutputs(Table tab,
+      Collection<List<ExprNodeGenericFuncDesc>> partSpecs, boolean throwIfNonExistent,
+      boolean ignoreProtection) throws SemanticException {
+
+    for (List<ExprNodeGenericFuncDesc> specs : partSpecs) {
+      for (ExprNodeGenericFuncDesc partSpec : specs) {
+        List<Partition> parts = new ArrayList<Partition>();
+        boolean hasUnknown = false;
         try {
-          parts = db.getPartitionsByFilter(tab, partSpec.toString());
+          hasUnknown = db.getPartitionsByExpr(tab, partSpec, conf, parts);
         } catch (Exception e) {
-          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+          throw new SemanticException(
+              ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()), e);
         }
-      }
-      else {
-        try {
-          parts = db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
-        } catch (Exception e) {
-          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+        if (hasUnknown) {
+          throw new SemanticException(
+              "Unexpected unknown partitions for " + partSpec.getExprString());
         }
-      }
 
-      if (parts.isEmpty()) {
-        if (throwIfNonExistent) {
-          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
+        // TODO: ifExists could be moved to metastore. In fact it already supports that. Check it
+        //       for now since we get parts for output anyway, so we can get the error message
+        //       earlier... If we get rid of output, we can get rid of this.
+        if (parts.isEmpty()) {
+          if (throwIfNonExistent) {
+            throw new SemanticException(
+                ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()));
+          }
         }
-      }
-      for (Partition p : parts) {
-        if (!ignoreProtection && !p.canDrop()) {
-          throw new SemanticException(
-            ErrorMsg.DROP_COMMAND_NOT_ALLOWED_FOR_PARTITION.getMsg(p.getCompleteName()));
+        for (Partition p : parts) {
+          // TODO: same thing, metastore already checks this but check here if we can.
+          if (!ignoreProtection && !p.canDrop()) {
+            throw new SemanticException(
+              ErrorMsg.DROP_COMMAND_NOT_ALLOWED_FOR_PARTITION.getMsg(p.getCompleteName()));
+          }
+          outputs.add(new WriteEntity(p));
         }
-        outputs.add(new WriteEntity(p));
       }
     }
   }
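
The rewritten drop-partition path above replaces PartitionSpec objects with ExprNodeGenericFuncDesc filters: each PARTITION clause becomes an AND of binary predicates over the partition columns, and specs with the same prefix length are ORed together when IF EXISTS (or ConfVars.DROPIGNORESNONEXISTENT) allows grouping, before being resolved through getPartitionsByExpr. A minimal HiveQL sketch of statements this analyzer now handles; table, column, and role names are illustrative only:

    -- comparators other than '=' no longer require all-string partition columns
    ALTER TABLE page_view DROP IF EXISTS
        PARTITION (ds < '2014-02-01'),
        PARTITION (ds = '2014-02-14', hr = '12');

    -- routed to the new analyzeSetShowRole(): one child builds a SET ROLE task,
    -- zero children builds a SHOW CURRENT ROLES task
    SET ROLE admin;
    SHOW CURRENT ROLES;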

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Fri Feb 14 10:53:31 2014
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
@@ -55,6 +56,12 @@ public class FunctionSemanticAnalyzer ex
   private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     String className = unescapeSQLString(ast.getChild(1).getText());
+
+    // Temp functions are not allowed to have qualified names.
+    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
+      throw new SemanticException("Temporary function cannot be created with a qualified name.");
+    }
+
     CreateFunctionDesc desc = new CreateFunctionDesc(functionName, className);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
   }
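
With the new FunctionUtils.isQualifiedFunctionName check, the grammar may accept a dot-qualified name for CREATE TEMPORARY FUNCTION, but semantic analysis now rejects it. A short illustrative sketch (the UDF class name is a placeholder):

    -- accepted: unqualified temporary function name
    CREATE TEMPORARY FUNCTION to_upper AS 'com.example.udf.ToUpper';

    -- rejected: "Temporary function cannot be created with a qualified name."
    CREATE TEMPORARY FUNCTION mydb.to_upper AS 'com.example.udf.ToUpper';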

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java Fri Feb 14 10:53:31 2014
@@ -49,8 +49,15 @@ public class GenTezWork implements NodeP
 
   static final private Log LOG = LogFactory.getLog(GenTezWork.class.getName());
 
-  // sequence number is used to name vertices (e.g.: Map 1, Reduce 14, ...)
-  private int sequenceNumber = 0;
+  // instance of shared utils
+  private GenTezUtils utils = null;
+
+  /**
+   * Constructor takes utils as parameter to facilitate testing
+   */
+  public GenTezWork(GenTezUtils utils) {
+    this.utils = utils;
+  }
 
   @Override
   public Object process(Node nd, Stack<Node> stack,
@@ -92,9 +99,9 @@ public class GenTezWork implements NodeP
     } else {
       // create a new vertex
       if (context.preceedingWork == null) {
-        work = createMapWork(context, root, tezWork);
+        work = utils.createMapWork(context, root, tezWork, null);
       } else {
-        work = createReduceWork(context, root, tezWork);
+        work = utils.createReduceWork(context, root, tezWork);
       }
       context.rootToWorkMap.put(root, work);
     }
@@ -186,74 +193,4 @@ public class GenTezWork implements NodeP
 
     return null;
   }
-
-  protected ReduceWork createReduceWork(GenTezProcContext context, Operator<?> root,
-      TezWork tezWork) {
-    assert !root.getParentOperators().isEmpty();
-    ReduceWork reduceWork = new ReduceWork("Reducer "+ (++sequenceNumber));
-    LOG.debug("Adding reduce work (" + reduceWork.getName() + ") for " + root);
-    reduceWork.setReducer(root);
-    reduceWork.setNeedsTagging(GenMapRedUtils.needsTagging(reduceWork));
-
-    // All parents should be reduce sinks. We pick the one we just walked
-    // to choose the number of reducers. In the join/union case they will
-    // all be -1. In sort/order case where it matters there will be only
-    // one parent.
-    assert context.parentOfRoot instanceof ReduceSinkOperator;
-    ReduceSinkOperator reduceSink = (ReduceSinkOperator) context.parentOfRoot;
-
-    reduceWork.setNumReduceTasks(reduceSink.getConf().getNumReducers());
-
-    setupReduceSink(context, reduceWork, reduceSink);
-
-    tezWork.add(reduceWork);
-    tezWork.connect(
-        context.preceedingWork,
-        reduceWork, EdgeType.SIMPLE_EDGE);
-
-    return reduceWork;
-  }
-
-  protected void setupReduceSink(GenTezProcContext context, ReduceWork reduceWork,
-      ReduceSinkOperator reduceSink) {
-
-    LOG.debug("Setting up reduce sink: " + reduceSink
-        + " with following reduce work: " + reduceWork.getName());
-
-    // need to fill in information about the key and value in the reducer
-    GenMapRedUtils.setKeyAndValueDesc(reduceWork, reduceSink);
-
-    // remember which parent belongs to which tag
-    reduceWork.getTagToInput().put(reduceSink.getConf().getTag(),
-         context.preceedingWork.getName());
-
-    // remember the output name of the reduce sink
-    reduceSink.getConf().setOutputName(reduceWork.getName());
-  }
-
-  protected MapWork createMapWork(GenTezProcContext context, Operator<?> root,
-      TezWork tezWork) throws SemanticException {
-    assert root.getParentOperators().isEmpty();
-    MapWork mapWork = new MapWork("Map "+ (++sequenceNumber));
-    LOG.debug("Adding map work (" + mapWork.getName() + ") for " + root);
-
-    // map work starts with table scan operators
-    assert root instanceof TableScanOperator;
-    String alias = ((TableScanOperator)root).getConf().getAlias();
-
-    setupMapWork(mapWork, context, root, alias);
-
-    // add new item to the tez work
-    tezWork.add(mapWork);
-
-    return mapWork;
-  }
-
-  // this method's main use is to help unit testing this class
-  protected void setupMapWork(MapWork mapWork, GenTezProcContext context,
-      Operator<? extends OperatorDesc> root, String alias) throws SemanticException {
-    // All the setup is done in GenMapRedUtils
-    GenMapRedUtils.setMapWork(mapWork, context.parseContext,
-        context.inputs, null, root, alias, context.conf, false);
-  }
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Fri Feb 14 10:53:31 2014
@@ -153,6 +153,7 @@ KW_SEQUENCEFILE: 'SEQUENCEFILE';
 KW_TEXTFILE: 'TEXTFILE';
 KW_RCFILE: 'RCFILE';
 KW_ORCFILE: 'ORC';
+KW_PARQUETFILE: 'PARQUET';
 KW_INPUTFORMAT: 'INPUTFORMAT';
 KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
 KW_INPUTDRIVER: 'INPUTDRIVER';

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Feb 14 10:53:31 2014
@@ -183,6 +183,7 @@ TOK_TABLEROWFORMATMAPKEYS;
 TOK_TABLEROWFORMATLINES;
 TOK_TABLEROWFORMATNULL;
 TOK_TBLORCFILE;
+TOK_TBLPARQUETFILE;
 TOK_TBLSEQUENCEFILE;
 TOK_TBLTEXTFILE;
 TOK_TBLRCFILE;
@@ -281,6 +282,7 @@ TOK_GRANT_ROLE;
 TOK_REVOKE_ROLE;
 TOK_SHOW_ROLE_GRANT;
 TOK_SHOW_ROLES;
+TOK_SHOW_SET_ROLE;
 TOK_SHOWINDEXES;
 TOK_SHOWDBLOCKS;
 TOK_INDEXCOMMENT;
@@ -317,6 +319,7 @@ TOK_SUBQUERY_OP_NOTIN;
 TOK_SUBQUERY_OP_NOTEXISTS;
 TOK_DB_TYPE;
 TOK_TABLE_TYPE;
+TOK_CTE;
 }
 
 
@@ -670,6 +673,8 @@ ddlStatement
     | showRoles
     | grantRole
     | revokeRole
+    | setRole
+    | showCurrentRole
     ;
 
 ifExists
@@ -787,7 +792,7 @@ createTableStatement
          tableFileFormat?
          tableLocation?
          tablePropertiesPrefixed?
-         (KW_AS selectStatement[true])?
+         (KW_AS selectStatementWithCTE)?
       )
     -> ^(TOK_CREATETABLE $name $ext? ifNotExists?
          ^(TOK_LIKETABLE $likeName?)
@@ -800,7 +805,7 @@ createTableStatement
          tableFileFormat?
          tableLocation?
          tablePropertiesPrefixed?
-         selectStatement?
+         selectStatementWithCTE?
         )
     ;
 
@@ -939,8 +944,8 @@ alterViewStatementSuffix
         -> ^(TOK_ALTERVIEW_ADDPARTS alterStatementSuffixAddPartitions)
     | alterStatementSuffixDropPartitions
         -> ^(TOK_ALTERVIEW_DROPPARTS alterStatementSuffixDropPartitions)
-    | name=tableName KW_AS selectStatement[true]
-        -> ^(TOK_ALTERVIEW_AS $name selectStatement)
+    | name=tableName KW_AS selectStatementWithCTE
+        -> ^(TOK_ALTERVIEW_AS $name selectStatementWithCTE)
     ;
 
 alterIndexStatementSuffix
@@ -1222,6 +1227,7 @@ fileFormat
     | KW_TEXTFILE  -> ^(TOK_TBLTEXTFILE)
     | KW_RCFILE  -> ^(TOK_TBLRCFILE)
     | KW_ORCFILE -> ^(TOK_TBLORCFILE)
+    | KW_PARQUETFILE -> ^(TOK_TBLPARQUETFILE)
     | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
       -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
     | genericSpec=identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
@@ -1274,7 +1280,7 @@ showStatement
     | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?  -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? showStmtIdentifier?)
     | KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tabname=tableName ((KW_FROM|KW_IN) db_name=identifier)? 
     -> ^(TOK_SHOWCOLUMNS $db_name? $tabname)
-    | KW_SHOW KW_FUNCTIONS showStmtIdentifier?  -> ^(TOK_SHOWFUNCTIONS showStmtIdentifier?)
+    | KW_SHOW KW_FUNCTIONS showFunctionIdentifier?  -> ^(TOK_SHOWFUNCTIONS showFunctionIdentifier?)
     | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec?) 
     | KW_SHOW KW_CREATE KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
     | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec?
@@ -1375,6 +1381,20 @@ showRoles
     -> ^(TOK_SHOW_ROLES)
     ;
 
+showCurrentRole
+@init {pushMsg("show current role", state);}
+@after {popMsg(state);}
+    : KW_SHOW KW_CURRENT KW_ROLES
+    -> ^(TOK_SHOW_SET_ROLE)
+    ;
+
+setRole
+@init {pushMsg("set role", state);}
+@after {popMsg(state);}
+    : KW_SET KW_ROLE roleName=identifier
+    -> ^(TOK_SHOW_SET_ROLE $roleName)
+    ;
+
 showGrants
 @init {pushMsg("show grants", state);}
 @after {popMsg(state);}
@@ -1476,15 +1496,15 @@ metastoreCheck
 createFunctionStatement
 @init { pushMsg("create function statement", state); }
 @after { popMsg(state); }
-    : KW_CREATE KW_TEMPORARY KW_FUNCTION identifier KW_AS StringLiteral
-    -> ^(TOK_CREATEFUNCTION identifier StringLiteral)
+    : KW_CREATE KW_TEMPORARY KW_FUNCTION functionIdentifier KW_AS StringLiteral
+    -> ^(TOK_CREATEFUNCTION functionIdentifier StringLiteral)
     ;
 
 dropFunctionStatement
 @init { pushMsg("drop temporary function statement", state); }
 @after { popMsg(state); }
-    : KW_DROP KW_TEMPORARY KW_FUNCTION ifExists? identifier
-    -> ^(TOK_DROPFUNCTION identifier ifExists?)
+    : KW_DROP KW_TEMPORARY KW_FUNCTION ifExists? functionIdentifier
+    -> ^(TOK_DROPFUNCTION functionIdentifier ifExists?)
     ;
 
 createMacroStatement
@@ -1511,14 +1531,14 @@ createViewStatement
         (LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition?
         tablePropertiesPrefixed?
         KW_AS
-        selectStatement[true]
+        selectStatementWithCTE
     -> ^(TOK_CREATEVIEW $name orReplace?
          ifNotExists?
          columnNameCommentList?
          tableComment?
          viewPartition?
          tablePropertiesPrefixed?
-         selectStatement
+         selectStatementWithCTE
         )
     ;
 
@@ -1535,6 +1555,13 @@ dropViewStatement
     : KW_DROP KW_VIEW ifExists? viewName -> ^(TOK_DROPVIEW viewName ifExists?)
     ;
 
+showFunctionIdentifier
+@init { pushMsg("identifier for show function statement", state); }
+@after { popMsg(state); }
+    : functionIdentifier
+    | StringLiteral
+    ;
+
 showStmtIdentifier
 @init { pushMsg("identifier for show statement", state); }
 @after { popMsg(state); }
@@ -1703,6 +1730,7 @@ tableFileFormat
       | KW_STORED KW_AS KW_TEXTFILE  -> TOK_TBLTEXTFILE
       | KW_STORED KW_AS KW_RCFILE  -> TOK_TBLRCFILE
       | KW_STORED KW_AS KW_ORCFILE -> TOK_TBLORCFILE
+      | KW_STORED KW_AS KW_PARQUETFILE -> TOK_TBLPARQUETFILE
       | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
       -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
       | KW_STORED KW_BY storageHandler=StringLiteral
@@ -1910,10 +1938,36 @@ setOperator
 
 queryStatementExpression[boolean topLevel]
     :
+    /* Would be nice to do this as a gated semantic predicate,
+       but the predicate gets pushed as a lookahead decision.
+       The calling rule does not know about topLevel.
+    */
+    (w=withClause {topLevel}?)?
+    queryStatementExpressionBody[topLevel] {
+      if ($w.tree != null) {
+      adaptor.addChild($queryStatementExpressionBody.tree, $w.tree);
+      }
+    }
+    ->  queryStatementExpressionBody
+    ;
+
+queryStatementExpressionBody[boolean topLevel]
+    :
     fromStatement[topLevel]
     | regularBody[topLevel]
     ;
-    
+
+withClause
+  :
+  KW_WITH cteStatement (COMMA cteStatement)* -> ^(TOK_CTE cteStatement+)
+;
+
+cteStatement
+   :
+   identifier KW_AS LPAREN queryStatementExpression[false] RPAREN
+   -> ^(TOK_SUBQUERY queryStatementExpression identifier)
+;
+
 fromStatement[boolean topLevel]
 : (singleFromStatement  -> singleFromStatement)
 	(u=setOperator r=singleFromStatement
@@ -1973,7 +2027,7 @@ regularBody[boolean topLevel]
 singleSelectStatement
    :
    selectClause
-   fromClause
+   fromClause?
    whereClause?
    groupByClause?
    havingClause?
@@ -1982,11 +2036,22 @@ singleSelectStatement
    distributeByClause?
    sortByClause?
    window_clause?
-   limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
+   limitClause? -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
                      selectClause whereClause? groupByClause? havingClause? orderByClause? clusterByClause?
                      distributeByClause? sortByClause? window_clause? limitClause?))
    ;
 
+selectStatementWithCTE
+    :
+    (w=withClause)?
+    selectStatement[true] {
+      if ($w.tree != null) {
+      adaptor.addChild($selectStatement.tree, $w.tree);
+      }
+    }
+    ->  selectStatement
+    ;
+
 body
    :
    insertClause
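
Besides the role and PARQUET tokens, the parser changes add an optional WITH clause (withClause/selectStatementWithCTE, emitting TOK_CTE) and make fromClause optional in singleSelectStatement. Queries of the following shape now parse (table and column names are placeholders):

    -- WITH clause accepted at the top level of a query and in
    -- CREATE TABLE ... AS / CREATE VIEW ... AS via selectStatementWithCTE
    WITH q1 AS (SELECT key FROM src WHERE key = '5')
    SELECT * FROM q1;

    -- fromClause? : a SELECT without FROM now parses
    SELECT 1 + 1;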

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Fri Feb 14 10:53:31 2014
@@ -183,7 +183,7 @@ functionName
 @init { gParent.pushMsg("function name", state); }
 @after { gParent.popMsg(state); }
     : // Keyword IF is also a function name
-    KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | identifier
+    KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | functionIdentifier
     ;
 
 castExpression
@@ -268,10 +268,10 @@ atomExpression
     KW_NULL -> TOK_NULL
     | dateLiteral
     | constant
-    | function
     | castExpression
     | caseExpression
     | whenExpression
+    | (functionName LPAREN) => function
     | tableOrColumn
     | LPAREN! expression RPAREN!
     ;
@@ -524,7 +524,7 @@ descFuncNames
     :
       sysFuncNames
     | StringLiteral
-    | identifier
+    | functionIdentifier
     ;
 
 identifier
@@ -533,7 +533,16 @@ identifier
     | nonReserved -> Identifier[$nonReserved.text]
     ;
 
+functionIdentifier
+@init { gParent.pushMsg("function identifier", state); }
+@after { gParent.popMsg(state); }
+    : db=identifier DOT fn=identifier
+    -> Identifier[$db.text + "." + $fn.text]
+    |
+    identifier
+    ;
+
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | 
 KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW
 _WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | 
 KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE |
  KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN
     ;
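
The new functionIdentifier rule folds db.fn into a single Identifier and is now referenced from functionName, descFuncNames, and the SHOW/CREATE/DROP FUNCTION rules, so dot-qualified function names parse (the analyzers above still reject them for temporary functions and macros). Illustrative parse-level examples; mydb.my_fn and the src table are placeholders:

    SHOW FUNCTIONS mydb.my_fn;
    DESCRIBE FUNCTION mydb.my_fn;
    SELECT mydb.my_fn(value) FROM src;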

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java Fri Feb 14 10:53:31 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
@@ -73,6 +74,12 @@ public class MacroSemanticAnalyzer exten
   @SuppressWarnings("unchecked")
   private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
+
+    // Temp macros are not allowed to have qualified names.
+    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
+      throw new SemanticException("Temporary macro cannot be created with a qualified name.");
+    }
+
     List<FieldSchema> arguments =
       BaseSemanticAnalyzer.getColumns((ASTNode)ast.getChild(1), true);
     boolean isNoArgumentMacro = arguments.size() == 0;
@@ -80,6 +87,7 @@ public class MacroSemanticAnalyzer exten
     ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
     ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
     final Set<String> actualColumnNames = new HashSet<String>();
+
     if(!isNoArgumentMacro) {
       /*
        * Walk down expression to see which arguments are actually used.
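
MacroSemanticAnalyzer applies the same FunctionUtils.isQualifiedFunctionName guard to temporary macros. A brief illustrative sketch:

    -- accepted
    CREATE TEMPORARY MACRO sigmoid(x DOUBLE) 1.0 / (1.0 + EXP(-x));

    -- rejected: "Temporary macro cannot be created with a qualified name."
    CREATE TEMPORARY MACRO mydb.sigmoid(x DOUBLE) 1.0 / (1.0 + EXP(-x));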

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Fri Feb 14 10:53:31 2014
@@ -219,6 +219,10 @@ public class QB {
     aliasToSubq.put(alias, qbexpr);
   }
 
+  public void rewriteCTEToSubq(String alias, String cteName, QBExpr qbexpr) {
+    rewriteViewToSubq(alias, cteName, qbexpr);
+  }
+
   public QBJoinTree getQbJoinTree() {
     return qbjoin;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Feb 14 10:53:31 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.io.IOException;
 import java.io.Serializable;
-import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -39,6 +39,8 @@ import org.antlr.runtime.tree.Tree;
 import org.antlr.runtime.tree.TreeWizard;
 import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
@@ -75,8 +77,10 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.GraphWalker;
@@ -165,6 +169,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.NullStructSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -178,6 +183,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.InputFormat;
 
 /**
@@ -187,6 +193,10 @@ import org.apache.hadoop.mapred.InputFor
  */
 
 public class SemanticAnalyzer extends BaseSemanticAnalyzer {
+
+  public static final String DUMMY_DATABASE = "_dummy_database";
+  public static final String DUMMY_TABLE = "_dummy_table";
+
   private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
   private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
   private HashMap<String, Operator<? extends OperatorDesc>> topOps;
@@ -241,6 +251,15 @@ public class SemanticAnalyzer extends Ba
   //flag for partial scan during analyze ... compute statistics
   protected boolean partialscan = false;
 
+  /*
+   * Capture the CTE definitions in a Query.
+   */
+  private final Map<String, ASTNode> aliasToCTEs;
+  /*
+   * Used to check recursive CTE invocations. Similar to viewsExpanded
+   */
+  private ArrayList<String> ctesExpanded;
+
   private static class Phase1Ctx {
     String dest;
     int nextNum;
@@ -276,6 +295,7 @@ public class SemanticAnalyzer extends Ba
         HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
     queryProperties = new QueryProperties();
     opToPartToSkewedPruner = new HashMap<TableScanOperator, Map<String, ExprNodeDesc>>();
+    aliasToCTEs = new HashMap<String, ASTNode>();
   }
 
   @Override
@@ -295,6 +315,7 @@ public class SemanticAnalyzer extends Ba
     opParseCtx.clear();
     groupOpToInputTables.clear();
     prunedPartitions.clear();
+    aliasToCTEs.clear();
   }
 
   public void initParseCtx(ParseContext pctx) {
@@ -657,6 +678,98 @@ public class SemanticAnalyzer extends Ba
     return alias;
   }
 
+  /*
+   * Phase1: hold onto any CTE definitions in aliasToCTEs.
+   * CTE definitions are global to the Query.
+   */
+  private void processCTE(QB qb, ASTNode ctes) throws SemanticException {
+
+    int numCTEs = ctes.getChildCount();
+
+    for(int i=0; i <numCTEs; i++) {
+      ASTNode cte = (ASTNode) ctes.getChild(i);
+      ASTNode cteQry = (ASTNode) cte.getChild(0);
+      String alias = unescapeIdentifier(cte.getChild(1).getText());
+
+      String qName = qb.getId() == null ? "" : qb.getId() + ":";
+      qName += alias.toLowerCase();
+
+      if ( aliasToCTEs.containsKey(qName)) {
+        throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(cte.getChild(1)));
+      }
+      aliasToCTEs.put(qName, cteQry);
+    }
+  }
+
+  /*
+   * We allow CTE definitions in views. So we can end up with a hierarchy of CTE definitions:
+   * - at the top level of a query statement
+   * - where a view is referenced.
+   * - views may refer to other views.
+   *
+   * The scoping rule we use is to search for a CTE from the current QB outwards. In order to
+   * disambiguate between CTEs at different levels, we qualify (prefix) them with the id of the QB
+   * they appear in when adding them to the <code>aliasToCTEs</code> map.
+   * 
+   */
+  private ASTNode findCTEFromName(QB qb, String cteName) {
+
+    /*
+     * When saving a view definition, all table references in the AST are qualified, including CTE references.
+     * CTE definitions, however, have no DB qualifier, so we strip the DB qualifier before searching in the
+     * <code>aliasToCTEs</code> map.
+     */
+    String currDB = SessionState.get().getCurrentDatabase();
+    if ( currDB != null && cteName.startsWith(currDB) &&
+        cteName.length() > currDB.length() &&
+        cteName.charAt(currDB.length()) == '.'   ) {
+      cteName = cteName.substring(currDB.length() + 1);
+    }
+
+    StringBuffer qId = new StringBuffer();
+    if (qb.getId() != null) {
+      qId.append(qb.getId());
+    }
+
+    while (qId.length() > 0) {
+      String nm = qId + ":" + cteName;
+      if (aliasToCTEs.containsKey(nm)) {
+        return aliasToCTEs.get(nm);
+      }
+      int lastIndex = qId.lastIndexOf(":");
+      lastIndex = lastIndex < 0 ? 0 : lastIndex;
+      qId.setLength(lastIndex);
+    }
+    return aliasToCTEs.get(cteName);
+  }
+  
+  /*
+   * If a CTE is referenced in a QueryBlock:
+   * - add it as a SubQuery for now.
+   *   - SQ.alias is the alias used in QB. (if no alias is specified,
+   *     it uses the CTE name; works just like table references)
+   *   - Adding SQ done by:
+   *     - copying AST of CTE
+   *     - setting ASTOrigin on cloned AST.
+   *   - trigger phase 1 on new QBExpr.
+   *   - update QB data structs: remove this as a table reference, move it to a SQ invocation. 
+   */
+  private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) throws SemanticException {
+    cteAlias = cteAlias == null ? cteName : cteAlias;
+    ASTNode cteQryNode = findCTEFromName(qb, cteName);
+    QBExpr cteQBExpr = new QBExpr(cteAlias);
+
+    String cteText = ctx.getTokenRewriteStream().toString(
+        cteQryNode.getTokenStartIndex(), cteQryNode.getTokenStopIndex());
+    final ASTNodeOrigin cteOrigin = new ASTNodeOrigin("CTE", cteName,
+        cteText, cteAlias, cteQryNode);
+    cteQryNode = (ASTNode) ParseDriver.adaptor.dupTree(cteQryNode);
+    SubQueryUtils.setOriginDeep(cteQryNode, cteOrigin);
+
+    doPhase1QBExpr(cteQryNode, cteQBExpr, qb.getId(), cteAlias);
+    qb.rewriteCTEToSubq(cteAlias, cteName, cteQBExpr);
+  }
+
   private boolean isJoinToken(ASTNode node) {
     if ((node.getToken().getType() == HiveParser.TOK_JOIN)
         || (node.getToken().getType() == HiveParser.TOK_CROSSJOIN)
@@ -1032,6 +1145,9 @@ public class SemanticAnalyzer extends Ba
         assert ast.getChildCount() == 1;
         qb.getParseInfo().getDestToLateralView().put(ctx_1.dest, ast);
         break;
+      case HiveParser.TOK_CTE:
+        processCTE(qb, ast);
+        break;
       default:
         skipRecursion = false;
         break;
@@ -1084,12 +1200,35 @@ public class SemanticAnalyzer extends Ba
       // This is needed for tracking the dependencies for inputs, along with their parents.
       Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo =
           new HashMap<String, ObjectPair<String, ReadEntity>>();
+
+      /*
+       * used to capture view to SQ conversions. This is used to check for
+       * recursive CTE invocations.
+       */
+      Map<String, String> sqAliasToCTEName = new HashMap<String, String>();
+
       for (String alias : tabAliases) {
         String tab_name = qb.getTabNameForAlias(alias);
         Table tab = null;
         try {
           tab = db.getTable(tab_name);
         } catch (InvalidTableException ite) {
+          /*
+           * if this s a CTE reference:
+           * Add its AST as a SubQuery to this QB.
+           */
+          ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
+          if ( cteNode != null ) {
+            String cte_name = tab_name.toLowerCase();
+            if (ctesExpanded.contains(cte_name)) {
+              throw new SemanticException("Recursive cte " + tab_name +
+                  " detected (cycle: " + StringUtils.join(ctesExpanded, " -> ") +
+                  " -> " + tab_name + ").");
+            }
+            addCTEAsSubQuery(qb, cte_name, alias);
+            sqAliasToCTEName.put(alias, cte_name);
+            continue;
+          }
           throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(qb
               .getParseInfo().getSrcForAlias(alias)));
         }
@@ -1182,15 +1321,20 @@ public class SemanticAnalyzer extends Ba
       // Go over the subqueries and getMetaData for these
       for (String alias : qb.getSubqAliases()) {
         boolean wasView = aliasToViewInfo.containsKey(alias);
+        boolean wasCTE = sqAliasToCTEName.containsKey(alias);
         ReadEntity newParentInput = null;
         if (wasView) {
           viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
           newParentInput = aliasToViewInfo.get(alias).getSecond();
+        } else if (wasCTE) {
+          ctesExpanded.add(sqAliasToCTEName.get(alias));
         }
         QBExpr qbexpr = qb.getSubqForAlias(alias);
         getMetaData(qbexpr, newParentInput);
         if (wasView) {
           viewsExpanded.remove(viewsExpanded.size() - 1);
+        } else if (wasCTE) {
+          ctesExpanded.remove(ctesExpanded.size() - 1);
         }
       }
 
@@ -8615,6 +8759,15 @@ public class SemanticAnalyzer extends Ba
       aliasToOpInfo.put(alias, op);
     }
 
+    if (aliasToOpInfo.isEmpty()) {
+      qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
+      TableScanOperator op = (TableScanOperator) genTablePlan(DUMMY_TABLE, qb);
+      op.getConf().setRowLimit(1);
+      qb.addAlias(DUMMY_TABLE);
+      qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
+      aliasToOpInfo.put(DUMMY_TABLE, op);
+    }
+
     Operator srcOpInfo = null;
     Operator lastPTFOp = null;
 
@@ -8696,6 +8849,37 @@ public class SemanticAnalyzer extends Ba
     return bodyOpInfo;
   }
 
+  private Table getDummyTable() throws SemanticException {
+    Path dummyPath = createDummyFile();
+    Table desc = new Table(DUMMY_DATABASE, DUMMY_TABLE);
+    desc.getTTable().getSd().setLocation(dummyPath.toString());
+    desc.getTTable().getSd().getSerdeInfo().setSerializationLib(NullStructSerDe.class.getName());
+    desc.setInputFormatClass(NullRowsInputFormat.class);
+    desc.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
+    return desc;
+  }
+
+  // add dummy data so the path is not removed by CombineHiveInputFormat, etc.
+  private Path createDummyFile() throws SemanticException {
+    Path dummyPath = new Path(ctx.getMRScratchDir(), "dummy_path");
+    Path dummyFile = new Path(dummyPath, "dummy_file");
+    FSDataOutputStream fout = null;
+    try {
+      FileSystem fs = dummyFile.getFileSystem(conf);
+      if (fs.exists(dummyFile)) {
+        return dummyPath;
+      }
+      fout = fs.create(dummyFile);
+      fout.write(1);
+      fout.close();
+    } catch (IOException e) {
+      throw new SemanticException(e);
+    } finally {
+      IOUtils.closeStream(fout);
+    }
+    return dummyPath;
+  }
+
   /**
    * Generates the operator DAG needed to implement lateral views and attaches
    * it to the TS operator.
@@ -8867,6 +9051,7 @@ public class SemanticAnalyzer extends Ba
     ASTNode child = ast;
     this.ast = ast;
     viewsExpanded = new ArrayList<String>();
+    ctesExpanded = new ArrayList<String>();
 
     LOG.info("Starting Semantic Analysis");
 
@@ -9555,7 +9740,7 @@ public class SemanticAnalyzer extends Ba
 
         break;
       default:
-        assert false;
+        throw new AssertionError("Unknown token: " + child.getToken());
       }
     }
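
Not part of the patch, but for orientation: a minimal sketch of the two query shapes the new code paths above serve, driven through the Driver API. It assumes a working Hive session; the class name and query strings are illustrative.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class FromlessAndCteSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        SessionState.start(conf);
        Driver driver = new Driver(conf);

        // No FROM clause: aliasToOpInfo is empty, so the plan scans the one-row
        // dummy table wired up by getDummyTable()/createDummyFile().
        driver.run("SELECT 1 + 1");

        // A CTE that references itself: "q1" is already on ctesExpanded when the
        // inner reference is resolved, so analysis fails with
        // "Recursive cte q1 detected (cycle: q1 -> q1)."
        driver.run("WITH q1 AS (SELECT * FROM q1) SELECT * FROM q1");
      }
    }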
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Fri Feb 14 10:53:31 2014
@@ -96,6 +96,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_GRANT_ROLE, HiveOperation.GRANT_ROLE);
     commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE);
     commandType.put(HiveParser.TOK_SHOW_ROLES, HiveOperation.SHOW_ROLES);
+    commandType.put(HiveParser.TOK_SHOW_SET_ROLE, HiveOperation.SHOW_ROLES);
     commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT);
     commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE);
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
@@ -215,6 +216,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ALTERTABLE_SKEWED:
       case HiveParser.TOK_TRUNCATETABLE:
       case HiveParser.TOK_EXCHANGEPARTITION:
+      case HiveParser.TOK_SHOW_SET_ROLE:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         HiveOperation commandType = null;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompilerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompilerFactory.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompilerFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompilerFactory.java Fri Feb 14 10:53:31 2014
@@ -35,8 +35,7 @@ public class TaskCompilerFactory {
    * into executable units.
    */
   public static TaskCompiler getCompiler(HiveConf conf, ParseContext parseContext) {
-    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")
-        && !parseContext.getQB().getParseInfo().isAnalyzeCommand()) {
+    if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
       return new TezCompiler();
     } else {
       return new MapReduceCompiler();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java Fri Feb 14 10:53:31 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.Jo
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
@@ -109,8 +110,10 @@ public class TezCompiler extends TaskCom
       List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs)
       throws SemanticException {
 
+    GenTezUtils.getUtils().resetSequenceNumber();
+
     ParseContext tempParseContext = getParseContext(pCtx, rootTasks);
-    GenTezWork genTezWork = new GenTezWork();
+    GenTezWork genTezWork = new GenTezWork(GenTezUtils.getUtils());
 
     GenTezProcContext procCtx = new GenTezProcContext(
         conf, tempParseContext, mvTask, rootTasks, inputs, outputs);
@@ -131,6 +134,10 @@ public class TezCompiler extends TaskCom
         FileSinkOperator.getOperatorName() + "%"),
         new CompositeProcessor(new FileSinkProcessor(), genTezWork));
 
+    opRules.put(new RuleRegExp("Handle Potential Analyze Command",
+        TableScanOperator.getOperatorName() + "%"),
+        new ProcessAnalyzeTable(GenTezUtils.getUtils()));
+
     opRules.put(new RuleRegExp("Bail on Union",
         UnionOperator.getOperatorName() + "%"), new NodeProcessor()
     {
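
Taken together with the TaskCompilerFactory change above, the practical effect is that ANALYZE statements no longer fall back to the MapReduce compiler. A rough sketch, assuming a Tez-enabled session and the usual src test table:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class AnalyzeOnTezSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "tez");
        SessionState.start(conf);

        // Previously forced onto MapReduceCompiler even with engine=tez; now the
        // "Handle Potential Analyze Command" rule lets ProcessAnalyzeTable plan it
        // as part of the Tez DAG.
        new Driver(conf).run("ANALYZE TABLE src COMPUTE STATISTICS");
      }
    }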

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java Fri Feb 14 10:53:31 2014
@@ -56,4 +56,10 @@ public interface HiveAuthorizationTaskFa
 
   public Task<? extends Serializable> createRevokeTask(ASTNode node, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) throws SemanticException;
+
+  public Task<? extends Serializable> createSetRoleTask(String roleName,
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException;
+
+  public Task<? extends Serializable> createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
+      HashSet<WriteEntity> outputs, Path resFile) throws SemanticException;
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Fri Feb 14 10:53:31 2014
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.plan.Ro
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 /**
  * Default implementation of HiveAuthorizationTaskFactory
@@ -235,9 +236,10 @@ public class HiveAuthorizationTaskFactor
     //check if admin option has been specified
     int rolesStartPos = 1;
     ASTNode wAdminOption = (ASTNode) ast.getChild(1);
+    boolean isAdmin = false;
     if(wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION){
       rolesStartPos = 2; //start reading role names from next position
-      //TODO: use the admin option
+      isAdmin = true;
     }
 
     List<String> roles = new ArrayList<String>();
@@ -252,7 +254,7 @@ public class HiveAuthorizationTaskFactor
     }
 
     //until change is made to use the admin option. Default to false with V2 authorization
-    boolean isAdmin = SessionState.get().isAuthorizationModeV2() ? false : true;
+
 
     GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
         roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
@@ -322,7 +324,8 @@ public class HiveAuthorizationTaskFactor
       Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());
 
       if (privObj == null) {
-        throw new SemanticException("undefined privilege " + privilegeType.getType());
+        throw new SemanticException("Undefined privilege " + PrivilegeType.
+            getPrivTypeByToken(privilegeType.getType()));
       }
       List<String> cols = null;
       if (privilegeDef.getChildCount() > 1) {
@@ -370,4 +373,21 @@ public class HiveAuthorizationTaskFactor
   private String toMessage(ErrorMsg message, Object detail) {
     return detail == null ? message.getMsg() : message.getMsg(detail.toString());
   }
+
+  @Override
+  public Task<? extends Serializable> createSetRoleTask(String roleName,
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs)
+      throws SemanticException {
+    return TaskFactory.get(new DDLWork(inputs, outputs, new RoleDDLDesc(roleName,
+      RoleDDLDesc.RoleOperation.SET_ROLE)), conf);
+  }
+
+  @Override
+  public Task<? extends Serializable> createShowCurrentRoleTask(
+      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, Path resFile)
+      throws SemanticException {
+    RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
+    ddlDesc.setResFile(resFile.toString());
+    return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc), conf);
+  }
 }
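
A hypothetical caller of the two new factory methods (the role name, result path, and wrapper class are placeholders; the signatures are the ones added above):

    import java.io.Serializable;
    import java.util.HashSet;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;

    public class RoleTaskSketch {
      public static void buildRoleTasks(HiveAuthorizationTaskFactory factory)
          throws SemanticException {
        HashSet<ReadEntity> inputs = new HashSet<ReadEntity>();
        HashSet<WriteEntity> outputs = new HashSet<WriteEntity>();

        // SET ROLE admin
        Task<? extends Serializable> setRole =
            factory.createSetRoleTask("admin", inputs, outputs);

        // SHOW CURRENT ROLES, with the result written to a scratch file
        Task<? extends Serializable> showCurrent =
            factory.createShowCurrentRoleTask(inputs, outputs,
                new Path("/tmp/current_role.res"));
      }
    }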

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java Fri Feb 14 10:53:31 2014
@@ -21,23 +21,38 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 /**
  * DropTableDesc.
- *
+ * TODO: this is currently used for both drop table and drop partitions.
  */
 @Explain(displayName = "Drop Table")
 public class DropTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  public static class PartSpec {
+    public PartSpec(ExprNodeGenericFuncDesc partSpec, int prefixLength) {
+      this.partSpec = partSpec;
+      this.prefixLength = prefixLength;
+    }
+    public ExprNodeGenericFuncDesc getPartSpec() {
+      return partSpec;
+    }
+    public int getPrefixLength() {
+      return prefixLength;
+    }
+    private static final long serialVersionUID = 1L;
+    private ExprNodeGenericFuncDesc partSpec;
+    // TODO: see if we can get rid of this... used in one place to distinguish archived parts
+    private int prefixLength;
+  }
+
   String tableName;
-  ArrayList<PartitionSpec> partSpecs;
+  ArrayList<PartSpec> partSpecs;
   boolean expectView;
   boolean ifExists;
   boolean ignoreProtection;
-  boolean stringPartitionColumns; // This is due to JDO not working very well with
-                                  // non-string partition columns.
-                                  // We need a different codepath for them
 
   public DropTableDesc() {
   }
@@ -45,28 +60,26 @@ public class DropTableDesc extends DDLDe
   /**
    * @param tableName
    */
-  public DropTableDesc(String tableName, boolean expectView,
-                       boolean ifExists, boolean stringPartitionColumns) {
+  public DropTableDesc(String tableName, boolean expectView, boolean ifExists) {
     this.tableName = tableName;
-    partSpecs = null;
+    this.partSpecs = null;
     this.expectView = expectView;
     this.ifExists = ifExists;
     this.ignoreProtection = false;
-    this.stringPartitionColumns = stringPartitionColumns;
   }
 
-  public DropTableDesc(String tableName, List<PartitionSpec> partSpecs,
-                       boolean expectView, boolean stringPartitionColumns,
-                       boolean ignoreProtection) {
-
+  public DropTableDesc(String tableName, Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs,
+      boolean expectView, boolean ignoreProtection) {
     this.tableName = tableName;
-    this.partSpecs = new ArrayList<PartitionSpec>(partSpecs.size());
-    for (int i = 0; i < partSpecs.size(); i++) {
-      this.partSpecs.add(partSpecs.get(i));
+    this.partSpecs = new ArrayList<PartSpec>(partSpecs.size());
+    for (Map.Entry<Integer, List<ExprNodeGenericFuncDesc>> partSpec : partSpecs.entrySet()) {
+      int prefixLength = partSpec.getKey();
+      for (ExprNodeGenericFuncDesc expr : partSpec.getValue()) {
+        this.partSpecs.add(new PartSpec(expr, prefixLength));
+      }
     }
     this.ignoreProtection = ignoreProtection;
     this.expectView = expectView;
-    this.stringPartitionColumns = stringPartitionColumns;
   }
 
   /**
@@ -88,19 +101,11 @@ public class DropTableDesc extends DDLDe
   /**
    * @return the partSpecs
    */
-  public ArrayList<PartitionSpec> getPartSpecs() {
+  public ArrayList<PartSpec> getPartSpecs() {
     return partSpecs;
   }
 
   /**
-   * @param partSpecs
-   *          the partSpecs to set
-   */
-  public void setPartSpecs(ArrayList<PartitionSpec> partSpecs) {
-    this.partSpecs = partSpecs;
-  }
-
-  /**
    * @return whether or not protection will be ignored for the partition
    */
   public boolean getIgnoreProtection() {
@@ -144,12 +149,4 @@ public class DropTableDesc extends DDLDe
   public void setIfExists(boolean ifExists) {
     this.ifExists = ifExists;
   }
-
-  public boolean isStringPartitionColumns() {
-    return stringPartitionColumns;
-  }
-
-  public void setStringPartitionColumns(boolean stringPartitionColumns) {
-    this.stringPartitionColumns = stringPartitionColumns;
-  }
 }
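
With PartitionSpec gone, callers hand DropTableDesc a map from prefix length to partition-filter expressions. A rough sketch of building the desc for something like ALTER TABLE src_part DROP PARTITION (ds = '2014-02-14'); the table, column, and value are made up, and the predicate is assembled the way the ql planner normally builds comparison expressions:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.plan.DropTableDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class DropPartitionDescSketch {
      public static DropTableDesc build() throws UDFArgumentException {
        // ds = '2014-02-14' as an expression over the partition column
        ExprNodeDesc col = new ExprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, "ds", null, true);
        ExprNodeDesc val = new ExprNodeConstantDesc(
            TypeInfoFactory.stringTypeInfo, "2014-02-14");
        ExprNodeGenericFuncDesc eq = ExprNodeGenericFuncDesc.newInstance(
            new GenericUDFOPEqual(), Arrays.asList(col, val));

        // Key = prefix length: how many leading partition columns the expression covers.
        Map<Integer, List<ExprNodeGenericFuncDesc>> specs =
            new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
        specs.put(1, Arrays.asList(eq));

        return new DropTableDesc("src_part", specs,
            false /* expectView */, false /* ignoreProtection */);
      }
    }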

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java Fri Feb 14 10:53:31 2014
@@ -28,28 +28,40 @@ public class RoleDDLDesc extends DDLDesc
   private static final long serialVersionUID = 1L;
 
   private String name;
-  
+
   private PrincipalType principalType;
-  
+
   private boolean group;
 
   private RoleOperation operation;
-  
+
   private String resFile;
-  
+
   private String roleOwnerName;
 
   /**
+   * thrift ddl for the result of show roles.
+   */
+  private static final String roleNameSchema = "role#string";
+
+  /**
    * thrift ddl for the result of show role.
    */
-  private static String schema = "role#string";
+  private static final String roleDescSchema =
+      "role,create_time,principal_name,principal_type,grant_option,grant_time,grantor#" +
+      "string:bigint:string:string:boolean:bigint:string";
 
-  public static String getSchema() {
-    return schema;
+  public static String getRoleNameSchema() {
+    return roleNameSchema;
+  }
+
+  public static String getRoleDescSchema() {
+    return roleDescSchema;
   }
 
   public static enum RoleOperation {
-    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"), SHOW_ROLES("show_roles");
+    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"),
+    SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role");
     private String operationName;
 
     private RoleOperation() {
@@ -63,11 +75,12 @@ public class RoleDDLDesc extends DDLDesc
       return operationName;
     }
 
+    @Override
     public String toString () {
       return this.operationName;
     }
   }
-  
+
   public RoleDDLDesc(){
   }
 
@@ -91,7 +104,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setName(String roleName) {
     this.name = roleName;
   }
-  
+
   @Explain(displayName = "role operation")
   public RoleOperation getOperation() {
     return operation;
@@ -100,7 +113,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setOperation(RoleOperation operation) {
     this.operation = operation;
   }
-  
+
   public PrincipalType getPrincipalType() {
     return principalType;
   }
@@ -116,7 +129,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setGroup(boolean group) {
     this.group = group;
   }
-  
+
   public String getResFile() {
     return resFile;
   }
@@ -124,7 +137,7 @@ public class RoleDDLDesc extends DDLDesc
   public void setResFile(String resFile) {
     this.resFile = resFile;
   }
-  
+
   public String getRoleOwnerName() {
     return roleOwnerName;
   }
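
The single schema string is now split in two; a consumer of the desc would pick the schema per operation, roughly as below (the helper method is hypothetical, but the accessors are the ones added above):

    import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;

    public class RoleSchemaSketch {
      public static String schemaFor(RoleDDLDesc desc) {
        // SHOW ROLE GRANT gets the wide, per-grant schema; SHOW ROLES and
        // SHOW CURRENT ROLE return a single role-name column.
        return desc.getOperation() == RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT
            ? RoleDDLDesc.getRoleDescSchema()
            : RoleDDLDesc.getRoleNameSchema();
      }
    }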

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java Fri Feb 14 10:53:31 2014
@@ -33,7 +33,10 @@ public class ShowGrantDesc {
   /**
    * thrift ddl for the result of show grant.
    */
-  private static final String schema = "property,value#string:string";
+  private static final String tabularSchema =
+      "database,table,partition,column,principal_name,principal_type,privilege," +
+      "grant_option,grant_time,grantor#" +
+      "string:string:string:string:string:string:string:boolean:bigint:string";
 
   public ShowGrantDesc(){
   }
@@ -47,7 +50,7 @@ public class ShowGrantDesc {
   }
 
   public static String getSchema() {
-    return schema;
+    return tabularSchema;
   }
 
   @Explain(displayName="principal desc")
@@ -83,5 +86,4 @@ public class ShowGrantDesc {
   public void setColumns(List<String> columns) {
     this.columns = columns;
   }
-  
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Fri Feb 14 10:53:31 2014
@@ -96,6 +96,12 @@ public class TableScanDesc extends Abstr
   }
 
   @Explain(displayName = "filterExpr")
+  public String getFilterExprString() {
+    StringBuffer sb = new StringBuffer();
+    PlanUtils.addExprToStringBuffer(filterExpr, sb);
+    return sb.toString();
+  }
+
   public ExprNodeGenericFuncDesc getFilterExpr() {
     return filterExpr;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Fri Feb 14 10:53:31 2014
@@ -45,13 +45,13 @@ public final class CommandProcessorFacto
 
   public static CommandProcessor get(String cmd)
       throws SQLException {
-    return get(cmd, null);
+    return get(new String[]{cmd}, null);
   }
 
-  public static CommandProcessor getForHiveCommand(String cmd, HiveConf conf)
+  public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
       throws SQLException {
     HiveCommand hiveCommand = HiveCommand.find(cmd);
-    if (hiveCommand == null || isBlank(cmd)) {
+    if (hiveCommand == null || isBlank(cmd[0])) {
       return null;
     }
     if (conf == null) {
@@ -61,8 +61,8 @@ public final class CommandProcessorFacto
     for (String availableCommand : conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST).split(",")) {
       availableCommands.add(availableCommand.toLowerCase().trim());
     }
-    if (!availableCommands.contains(cmd.trim().toLowerCase())) {
-      throw new SQLException("Insufficient privileges to execute " + cmd, "42000");
+    if (!availableCommands.contains(cmd[0].trim().toLowerCase())) {
+      throw new SQLException("Insufficient privileges to execute " + cmd[0], "42000");
     }
     switch (hiveCommand) {
       case SET:
@@ -83,13 +83,13 @@ public final class CommandProcessorFacto
     }
   }
 
-  public static CommandProcessor get(String cmd, HiveConf conf)
+  public static CommandProcessor get(String[] cmd, HiveConf conf)
       throws SQLException {
     CommandProcessor result = getForHiveCommand(cmd, conf);
     if (result != null) {
       return result;
     }
-    if (isBlank(cmd)) {
+    if (isBlank(cmd[0])) {
       return null;
     } else {
       if (conf == null) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Fri Feb 14 10:53:31 2014
@@ -38,11 +38,18 @@ public enum HiveCommand {
       COMMANDS.add(command.name());
     }
   }
-  public static HiveCommand find(String command) {
-    if (command != null) {
-      command = command.trim().toUpperCase();
-      if (COMMANDS.contains(command)) {
-        return HiveCommand.valueOf(command);
+  public static HiveCommand find(String[] command) {
+    if (null == command){
+      return null;
+    }
+    String cmd = command[0];
+    if (cmd != null) {
+      cmd = cmd.trim().toUpperCase();
+      if (command.length > 1 && "role".equalsIgnoreCase(command[1])) {
+        // special handling for set role r1 statement
+        return null;
+      } else if (COMMANDS.contains(cmd)) {
+        return HiveCommand.valueOf(cmd);
       }
     }
     return null;
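
The tokenized lookup is what lets "set role ..." bypass SetProcessor and reach the new DDL path. A rough illustration, assuming callers split the command line on whitespace:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.processors.CommandProcessor;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
    import org.apache.hadoop.hive.ql.processors.HiveCommand;

    public class SetRoleDispatchSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();

        // Plain "set" is still a Hive command, handled by SetProcessor.
        HiveCommand plainSet = HiveCommand.find(new String[] {"set", "x=1"});        // SET

        // "set role r1" is deliberately not a Hive command at all...
        HiveCommand setRole = HiveCommand.find(new String[] {"set", "role", "r1"});  // null

        // ...so the factory falls through to the Driver, where the statement is
        // parsed and handled by DDLSemanticAnalyzer (TOK_SHOW_SET_ROLE).
        CommandProcessor processor =
            CommandProcessorFactory.get(new String[] {"set", "role", "r1"}, conf);
      }
    }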

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java Fri Feb 14 10:53:31 2014
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -30,7 +31,7 @@ public class HadoopDefaultAuthenticator 
 
   protected String userName;
   protected List<String> groupNames;
-  
+
   protected Configuration conf;
 
   @Override
@@ -74,4 +75,9 @@ public class HadoopDefaultAuthenticator 
     return this.conf;
   }
 
+  @Override
+  public void setSessionState(SessionState ss) {
+    //no op
+  }
+
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java Fri Feb 14 10:53:31 2014
@@ -25,7 +25,7 @@ public class HadoopDefaultMetastoreAuthe
 
   @Override
   public void setMetaStoreHandler(HMSHandler handler) {
-    setConf(handler.getHiveConf());
+    setConf(handler.getConf());
   }
 
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java?rev=1568236&r1=1568235&r2=1568236&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java Fri Feb 14 10:53:31 2014
@@ -22,17 +22,20 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * HiveAuthenticationProvider is an interface for authentication. The
  * implementation should return userNames and groupNames.
  */
 public interface HiveAuthenticationProvider extends Configurable{
-  
+
   public String getUserName();
-  
+
   public List<String> getGroupNames();
-  
+
   public void destroy() throws HiveException;
 
+  public void setSessionState(SessionState ss);
+
 }
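
The new setSessionState hook exists so an authenticator can resolve the user from the session it belongs to rather than from UGI; HadoopDefaultAuthenticator above simply ignores the call. A sketch of such an implementation (illustrative only, not part of this patch):

    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class SessionUserAuthenticatorSketch implements HiveAuthenticationProvider {
      private SessionState sessionState;
      private Configuration conf;

      @Override
      public void setSessionState(SessionState ss) {
        this.sessionState = ss;
      }

      @Override
      public String getUserName() {
        // Resolve the user from the session rather than from Hadoop security.
        return sessionState == null ? null : sessionState.getUserName();
      }

      @Override
      public List<String> getGroupNames() {
        return Collections.emptyList();
      }

      @Override
      public void destroy() throws HiveException {
        // nothing to clean up
      }

      @Override
      public void setConf(Configuration conf) {
        this.conf = conf;
      }

      @Override
      public Configuration getConf() {
        return conf;
      }
    }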