Posted to commits@hive.apache.org by gu...@apache.org on 2013/10/23 22:50:43 UTC

svn commit: r1535174 [5/8] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ common/src/java/conf/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/impl/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/ hc...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Oct 23 20:50:38 2013
@@ -99,6 +99,7 @@ import org.apache.hadoop.hive.ql.parse.P
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionedTableFunctionSpec;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec;
+import org.apache.hadoop.hive.ql.parse.QBSubQuery.SubQueryType;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.CurrentRowSpec;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
@@ -1758,11 +1759,105 @@ public class SemanticAnalyzer extends Ba
   }
 
   @SuppressWarnings("nls")
-  private Operator genFilterPlan(String dest, QB qb, Operator input)
+  private Operator genFilterPlan(String dest, QB qb, Operator input,
+      Map<String, Operator> aliasToOpInfo)
       throws SemanticException {
 
+    OpParseContext inputCtx = opParseCtx.get(input);
+    RowResolver inputRR = inputCtx.getRowResolver();
     ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
-    return genFilterPlan(qb, (ASTNode) whereExpr.getChild(0), input);
+
+    /*
+     * Handling of SubQuery Expressions:
+     * if "Where clause contains no SubQuery expressions" then
+     *   -->[true] ===CONTINUE_FILTER_PROCESSING===
+     * else
+     *   -->[false] "extract SubQuery expressions\n from Where clause"
+     *   if "this is a nested SubQuery or \nthere are more than 1 SubQuery expressions" then
+     *     -->[yes] "throw Unsupported Error"
+     *   else
+     *     --> "Rewrite Search condition to \nremove SubQuery predicate"
+     *      --> "build QBSubQuery"
+     *        --> "extract correlated predicates \nfrom Where Clause"
+     *        --> "add correlated Items to \nSelect List and Group By"
+     *        --> "construct Join Predicate \nfrom correlation predicates"
+     *     --> "Generate Plan for\n modified SubQuery"
+     *     --> "Build the Join Condition\n for Parent Query to SubQuery join"
+     *     --> "Build the QBJoinTree from the Join condition"
+     *     --> "Update Parent Query Filter\n with any Post Join conditions"
+     *     --> ===CONTINUE_FILTER_PROCESSING===
+     *   endif
+     * endif
+     */
+    ASTNode searchCond = (ASTNode) whereExpr.getChild(0);
+    List<ASTNode> subQueriesInOriginalTree = SubQueryUtils.findSubQueries(searchCond);
+
+    if ( subQueriesInOriginalTree != null ) {
+
+      /*
+       * Restriction.8.m :: We allow only 1 SubQuery expression per Query.
+       */
+      if (subQueriesInOriginalTree.size() > 1 ) {
+
+        throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
+            subQueriesInOriginalTree.get(1), "Only 1 SubQuery expression is supported."));
+      }
+
+      /*
+       * Clone the Search AST; apply all rewrites on the clone.
+       */
+      ASTNode clonedSearchCond = (ASTNode) ParseDriver.adaptor.dupTree(searchCond);
+      List<ASTNode> subQueries = SubQueryUtils.findSubQueries(clonedSearchCond);
+
+      for(int i=0; i < subQueries.size(); i++) {
+        ASTNode subQueryAST = subQueries.get(i);
+        ASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
+
+        int sqIdx = i+1;
+        clonedSearchCond = SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
+
+        QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(),
+            sqIdx, subQueryAST, originalSubQueryAST, ctx);
+
+        subQuery.validateAndRewriteAST(inputRR);
+
+        QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(), true);
+        Phase1Ctx ctx_1 = initPhase1Ctx();
+        doPhase1(subQuery.getSubQueryAST(), qbSQ, ctx_1);
+        getMetaData(qbSQ);
+        Operator sqPlanTopOp = genPlan(qbSQ);
+        aliasToOpInfo.put(subQuery.getAlias(), sqPlanTopOp);
+        RowResolver sqRR = opParseCtx.get(sqPlanTopOp).getRowResolver();
+
+        /*
+         * Check.5.h :: For In and Not In the SubQuery must implicitly or
+         * explicitly only contain one select item.
+         */
+        if ( subQuery.getOperator().getType() != SubQueryType.EXISTS &&
+            subQuery.getOperator().getType() != SubQueryType.NOT_EXISTS &&
+            sqRR.getColumnInfos().size() -
+               subQuery.getNumOfCorrelationExprsAddedToSQSelect() > 1 ) {
+          throw new SemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
+              subQueryAST, "SubQuery can contain only 1 item in Select List."));
+        }
+
+        /*
+         * Gen Join between outer Operator and SQ op
+         */
+        subQuery.buildJoinCondition(inputRR, sqRR);
+        QBJoinTree joinTree = genSQJoinTree(qb, subQuery,
+            input,
+            aliasToOpInfo);
+        /*
+         * push filters only for this QBJoinTree. Child QBJoinTrees have already been handled.
+         */
+        pushJoinFilters(qb, joinTree, aliasToOpInfo, false);
+        input = genJoinOperator(qbSQ, joinTree, aliasToOpInfo, input);
+        searchCond = subQuery.updateOuterQueryFilter(clonedSearchCond);
+      }
+    }
+
+    return genFilterPlan(qb, searchCond, input);
   }
 
   /**
@@ -3410,7 +3505,6 @@ public class SemanticAnalyzer extends Ba
       ASTNode value = entry.getValue();
       String aggName = unescapeIdentifier(value.getChild(0).getText());
       ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
-      new ArrayList<Class<?>>();
       // 0 is the function name
       for (int i = 1; i < value.getChildCount(); i++) {
         ASTNode paraExpr = (ASTNode) value.getChild(i);
@@ -3436,8 +3530,10 @@ public class SemanticAnalyzer extends Ba
       String field = getColumnInternalName(groupByKeys.size()
           + aggregations.size() - 1);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
-          field, udaf.returnType, "", false));
+      if (groupByOutputRowResolver.getExpression(value) == null) {
+        groupByOutputRowResolver.putExpression(value, new ColumnInfo(
+            field, udaf.returnType, "", false));
+      }
       // Save the evaluator so that it can be used by the next-stage
       // GroupByOperators
       if (genericUDAFEvaluators != null) {
@@ -3618,17 +3714,17 @@ public class SemanticAnalyzer extends Ba
           }
           // add the expr to reduceKeys if it is not present
           if (ri == reduceKeys.size()) {
+            String name = getColumnInternalName(numExprs);
+            String field = Utilities.ReduceField.KEY.toString() + "." + colName
+                + ":" + i
+                + "." + name;
+            ColumnInfo colInfo = new ColumnInfo(field, expr.getTypeInfo(), null, false);
+            reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
+            colExprMap.put(field, expr);
             reduceKeys.add(expr);
           }
           // add the index of expr in reduceKeys to distinctIndices
           distinctIndices.add(ri);
-          String name = getColumnInternalName(numExprs);
-          String field = Utilities.ReduceField.KEY.toString() + "." + colName
-              + ":" + i
-              + "." + name;
-          ColumnInfo colInfo = new ColumnInfo(field, expr.getTypeInfo(), null, false);
-          reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
-          colExprMap.put(field, expr);
           numExprs++;
         }
         distinctColIndices.add(distinctIndices);
@@ -4038,7 +4134,8 @@ public class SemanticAnalyzer extends Ba
   }
 
   @SuppressWarnings({"nls"})
-  private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operator input)
+  private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operator input,
+      Map<String, Operator> aliasToOpInfo)
       throws SemanticException {
 
     QBParseInfo parseInfo = qb.getParseInfo();
@@ -4118,7 +4215,7 @@ public class SemanticAnalyzer extends Ba
       curr = forwardOp;
 
       if (parseInfo.getWhrForClause(dest) != null) {
-        curr = genFilterPlan(dest, qb, forwardOp);
+        curr = genFilterPlan(dest, qb, forwardOp, aliasToOpInfo);
       }
 
       // Generate GroupbyOperator
@@ -6035,17 +6132,21 @@ public class SemanticAnalyzer extends Ba
   }
 
   private Operator genJoinOperator(QB qb, QBJoinTree joinTree,
-      Map<String, Operator> map) throws SemanticException {
+      Map<String, Operator> map,
+      Operator joiningOp) throws SemanticException {
     QBJoinTree leftChild = joinTree.getJoinSrc();
-    Operator joinSrcOp = null;
-    if (leftChild != null) {
-      Operator joinOp = genJoinOperator(qb, leftChild, map);
+    Operator joinSrcOp = joiningOp instanceof JoinOperator ? joiningOp : null;
+
+    if (joinSrcOp == null && leftChild != null) {
+      joinSrcOp = genJoinOperator(qb, leftChild, map, null);
+    }
+
+    if ( joinSrcOp != null ) {
       ArrayList<ASTNode> filter = joinTree.getFiltersForPushing().get(0);
       for (ASTNode cond : filter) {
-        joinOp = genFilterPlan(qb, cond, joinOp);
+        joinSrcOp = genFilterPlan(qb, cond, joinSrcOp);
       }
-
-      joinSrcOp = genJoinReduceSinkChild(qb, joinTree, joinOp, null, 0);
+      joinSrcOp = genJoinReduceSinkChild(qb, joinTree, joinSrcOp, null, 0);
     }
 
     Operator[] srcOps = new Operator[joinTree.getBaseSrc().length];
@@ -6240,7 +6341,7 @@ public class SemanticAnalyzer extends Ba
   private Operator genJoinPlan(QB qb, Map<String, Operator> map)
       throws SemanticException {
     QBJoinTree joinTree = qb.getQbJoinTree();
-    Operator joinOp = genJoinOperator(qb, joinTree, map);
+    Operator joinOp = genJoinOperator(qb, joinTree, map, null);
     return joinOp;
   }
 
@@ -6250,8 +6351,20 @@ public class SemanticAnalyzer extends Ba
    */
   private void pushJoinFilters(QB qb, QBJoinTree joinTree,
       Map<String, Operator> map) throws SemanticException {
-    if (joinTree.getJoinSrc() != null) {
-      pushJoinFilters(qb, joinTree.getJoinSrc(), map);
+    pushJoinFilters(qb, joinTree, map, true);
+  }
+
+  /**
+   * Extract the filters from the join condition and push them on top of the
+   * source operators. This procedure traverses the query tree recursively,
+   */
+  private void pushJoinFilters(QB qb, QBJoinTree joinTree,
+      Map<String, Operator> map,
+      boolean recursively) throws SemanticException {
+    if ( recursively ) {
+      if (joinTree.getJoinSrc() != null) {
+        pushJoinFilters(qb, joinTree.getJoinSrc(), map);
+      }
     }
     ArrayList<ArrayList<ASTNode>> filters = joinTree.getFiltersForPushing();
     int pos = 0;
@@ -6404,6 +6517,134 @@ public class SemanticAnalyzer extends Ba
     return joinTree;
   }
 
+  /*
+   * Set up a QBJoinTree between a SubQuery and its Parent Query. The Parent Query
+   * is the lhs of the Join.
+   *
+   * The Parent Query is represented by the last Operator needed to process its From Clause.
+   * In case of a single table Query this will be a TableScan, but it can be a Join Operator
+   * if the Parent Query contains Join clauses, or in case of a single source from clause,
+   * the source could be a SubQuery or a PTF invocation.
+   *
+   * We set up the QBJoinTree with the above constraints in place. So:
+   * - the lhs of the QBJoinTree can be another QBJoinTree if the Parent Query operator
+   *   is a JoinOperator. In this case we get its QBJoinTree from the 'joinContext'
+   * - the rhs is always a reference to the SubQuery. Its alias is obtained from the
+   *   QBSubQuery object.
+   *
+   * The QBSubQuery also provides the Joining Condition AST. The Joining Condition was
+   * transformed during QBSubQuery setup, before this call; it contains any correlated
+   * predicates plus a predicate for joining the Parent Query expression with the SubQuery.
+   *
+   * The QBSubQuery also specifies what kind of Join to construct.
+   *
+   * Given this information, once we initialize the QBJoinTree, we call the 'parseJoinCondition'
+   * method to validate and parse Join conditions.
+   */
+  private QBJoinTree genSQJoinTree(QB qb, QBSubQuery subQuery,
+      Operator joiningOp,
+      Map<String, Operator> aliasToOpInfo)
+          throws SemanticException {
+    QBJoinTree joinTree = new QBJoinTree();
+    JoinCond[] condn = new JoinCond[1];
+
+    switch (subQuery.getJoinType()) {
+    case LEFTOUTER:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
+      break;
+    case RIGHTOUTER:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
+      break;
+    case FULLOUTER:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
+      break;
+    case LEFTSEMI:
+      joinTree.setNoSemiJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
+      break;
+    default:
+      condn[0] = new JoinCond(0, 1, JoinType.INNER);
+      joinTree.setNoOuterJoin(true);
+      break;
+    }
+    joinTree.setJoinCond(condn);
+
+    if ( joiningOp instanceof JoinOperator ) {
+      QBJoinTree leftTree = joinContext.get(joiningOp);
+      joinTree.setJoinSrc(leftTree);
+      String[] leftChildAliases = leftTree.getLeftAliases();
+      String leftAliases[] = new String[leftChildAliases.length + 1];
+      for (int i = 0; i < leftChildAliases.length; i++) {
+        leftAliases[i] = leftChildAliases[i];
+      }
+      leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
+      joinTree.setLeftAliases(leftAliases);
+    } else {
+      String alias = unescapeIdentifier(
+          SubQueryUtils.getAlias(joiningOp, aliasToOpInfo).toLowerCase());
+      joinTree.setLeftAlias(alias);
+      String[] leftAliases = new String[1];
+      leftAliases[0] = alias;
+      joinTree.setLeftAliases(leftAliases);
+      String[] children = new String[2];
+      children[0] = alias;
+      joinTree.setBaseSrc(children);
+      joinTree.setId(qb.getId());
+      joinTree.getAliasToOpInfo().put(
+          getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
+    }
+
+    String rightalias = unescapeIdentifier(subQuery.getAlias().toLowerCase());
+    String[] rightAliases = new String[1];
+    rightAliases[0] = rightalias;
+    joinTree.setRightAliases(rightAliases);
+    String[] children = joinTree.getBaseSrc();
+    if (children == null) {
+      children = new String[2];
+    }
+    children[1] = rightalias;
+    joinTree.setBaseSrc(children);
+    joinTree.setId(qb.getId());
+    joinTree.getAliasToOpInfo().put(
+        getModifiedAlias(qb, rightalias), aliasToOpInfo.get(rightalias));
+    // remember rhs table for semijoin
+    if (joinTree.getNoSemiJoin() == false) {
+      joinTree.addRHSSemijoin(rightalias);
+    }
+
+    ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
+    expressions.add(new ArrayList<ASTNode>());
+    expressions.add(new ArrayList<ASTNode>());
+    joinTree.setExpressions(expressions);
+
+    ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
+    joinTree.setNullSafes(nullsafes);
+
+    ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
+    filters.add(new ArrayList<ASTNode>());
+    filters.add(new ArrayList<ASTNode>());
+    joinTree.setFilters(filters);
+    joinTree.setFilterMap(new int[2][]);
+
+    ArrayList<ArrayList<ASTNode>> filtersForPushing =
+        new ArrayList<ArrayList<ASTNode>>();
+    filtersForPushing.add(new ArrayList<ASTNode>());
+    filtersForPushing.add(new ArrayList<ASTNode>());
+    joinTree.setFiltersForPushing(filtersForPushing);
+
+    ASTNode joinCond = subQuery.getJoinConditionAST();
+    ArrayList<String> leftSrc = new ArrayList<String>();
+    parseJoinCondition(joinTree, joinCond, leftSrc);
+    if (leftSrc.size() == 1) {
+      joinTree.setLeftAlias(leftSrc.get(0));
+    }
+
+    return joinTree;
+  }
+
   private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree,
       Map<String, Operator> aliasToOpInfo)
       throws SemanticException {
@@ -7194,7 +7435,8 @@ public class SemanticAnalyzer extends Ba
   }
 
   @SuppressWarnings("nls")
-  private Operator genBodyPlan(QB qb, Operator input) throws SemanticException {
+  private Operator genBodyPlan(QB qb, Operator input, Map<String, Operator> aliasToOpInfo)
+      throws SemanticException {
     QBParseInfo qbp = qb.getParseInfo();
 
     TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
@@ -7290,7 +7532,7 @@ public class SemanticAnalyzer extends Ba
               curr = inputs.get(dest);
 
               if (qbp.getWhrForClause(dest) != null) {
-                curr = genFilterPlan(dest, qb, curr);
+                curr = genFilterPlan(dest, qb, curr, aliasToOpInfo);
               }
 
               if (qbp.getAggregationExprsForClause(dest).size() != 0
@@ -7320,7 +7562,7 @@ public class SemanticAnalyzer extends Ba
               curr = genPostGroupByBodyPlan(curr, dest, qb);
             }
           } else {
-            curr = genGroupByPlan1ReduceMultiGBY(commonGroupByDestGroup, qb, input);
+            curr = genGroupByPlan1ReduceMultiGBY(commonGroupByDestGroup, qb, input, aliasToOpInfo);
           }
         }
       }
@@ -8124,7 +8366,7 @@ public class SemanticAnalyzer extends Ba
       srcOpInfo = lastPTFOp != null ? lastPTFOp : srcOpInfo;
     }
 
-    Operator bodyOpInfo = genBodyPlan(qb, srcOpInfo);
+    Operator bodyOpInfo = genBodyPlan(qb, srcOpInfo, aliasToOpInfo);
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("Created Plan for Query Block " + qb.getId());
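
The genFilterPlan changes above take a WHERE clause containing a single SubQuery expression,
remove the SubQuery predicate from the search condition, plan the SubQuery as its own query
block, and join it back to the parent query before the remaining filter is generated. A minimal
illustration of the intended behaviour, written as Java comments; the LEFT SEMI JOIN reading of
IN is an assumption based on the LEFTSEMI case in genSQJoinTree, and the queries are not taken
from the patch:

    // Illustration only (assumed behaviour, not part of the commit):
    //
    //   SELECT a.key, a.value
    //   FROM src a
    //   WHERE a.key IN (SELECT b.key FROM src1 b);
    //
    // is planned as a join of the parent query with the SubQuery plan; for IN this is
    // conceptually a LEFT SEMI JOIN on a.key = b.key.
    //
    //   SELECT a.key
    //   FROM src a
    //   WHERE a.key   IN (SELECT b.key   FROM src1 b)
    //     AND a.value IN (SELECT c.value FROM src1 c);
    //
    // is rejected under Restriction.8.m with UNSUPPORTED_SUBQUERY_EXPRESSION
    // ("Only 1 SubQuery expression is supported.").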

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Wed Oct 23 20:50:38 2013
@@ -70,6 +70,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
+
 /**
  * The Factory for creating typecheck processors. The typecheck processors are
  * used to processes the syntax trees for expressions and convert them into
@@ -156,13 +157,16 @@ public final class TypeCheckProcFactory 
         + "%|" + HiveParser.KW_IF + "%|" + HiveParser.KW_CASE + "%|"
         + HiveParser.KW_WHEN + "%|" + HiveParser.KW_IN + "%|"
         + HiveParser.KW_ARRAY + "%|" + HiveParser.KW_MAP + "%|"
-        + HiveParser.KW_STRUCT + "%"),
+        + HiveParser.KW_STRUCT + "%|" + HiveParser.KW_EXISTS + "%|"
+        + HiveParser.TOK_SUBQUERY_OP_NOTIN + "%"),
         getStrExprProcessor());
     opRules.put(new RuleRegExp("R4", HiveParser.KW_TRUE + "%|"
         + HiveParser.KW_FALSE + "%"), getBoolExprProcessor());
     opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%"), getDateExprProcessor());
     opRules.put(new RuleRegExp("R6", HiveParser.TOK_TABLE_OR_COL + "%"),
         getColumnExprProcessor());
+    opRules.put(new RuleRegExp("R7", HiveParser.TOK_SUBQUERY_OP + "%"),
+        getSubQueryExprProcessor());
 
     // The dispatcher fires the processor corresponding to the closest matching
     // rule and passes the context along
@@ -1120,4 +1124,44 @@ public final class TypeCheckProcFactory 
   public static DefaultExprProcessor getDefaultExprProcessor() {
     return new DefaultExprProcessor();
   }
+
+  /**
+   * Processor for subquery expressions.
+   */
+  public static class SubQueryExprProcessor implements NodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+
+      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+      if (ctx.getError() != null) {
+        return null;
+      }
+
+      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
+      if (desc != null) {
+        return desc;
+      }
+
+      ASTNode expr = (ASTNode) nd;
+      ASTNode sqNode = (ASTNode) expr.getParent().getChild(1);
+      /*
+       * Restriction.1.h :: SubQueries only supported in the SQL Where Clause.
+       */
+      ctx.setError(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(sqNode,
+          "Currently SubQuery expressions are only allowed as Where Clause predicates"),
+          sqNode);
+      return null;
+    }
+  }
+
+  /**
+   * Factory method to get SubQueryExprProcessor.
+   *
+   * @return SubQueryExprProcessor.
+   */
+  public static SubQueryExprProcessor getSubQueryExprProcessor() {
+    return new SubQueryExprProcessor();
+  }
 }
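
The new R7 rule routes any TOK_SUBQUERY_OP node that reaches expression type-checking to
SubQueryExprProcessor, which records Restriction.1.h as an error. WHERE-clause SubQueries are
expected not to reach this processor, since the genFilterPlan changes above remove the SubQuery
predicate from the search condition before the remaining filter is type-checked; in effect the
rule rejects SubQueries in any other position. An illustrative query, assuming the grammar at
this revision accepts a SubQuery predicate in HAVING:

    // Illustration only (hypothetical query, assumed to parse at this revision):
    //
    //   SELECT b.key, count(*)
    //   FROM src b
    //   GROUP BY b.key
    //   HAVING b.key IN (SELECT a.key FROM src a);
    //
    // The HAVING expression is type-checked, its TOK_SUBQUERY_OP matches rule R7, and the
    // query fails with "Currently SubQuery expressions are only allowed as Where Clause
    // predicates".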

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Wed Oct 23 20:50:38 2013
@@ -33,6 +33,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
+import javax.security.auth.login.LoginException;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -49,12 +51,14 @@ import org.apache.hadoop.hive.ql.history
 import org.apache.hadoop.hive.ql.history.HiveHistoryImpl;
 import org.apache.hadoop.hive.ql.history.HiveHistoryProxyHandler;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.util.DosToUnix;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -278,6 +282,16 @@ public class SessionState {
       }
     }
 
+    // Get the following out of the way when you start the session; these take a
+    // while and should be done when we start up.
+    try {
+      Hive.get(startSs.conf).setupMSC();
+      ShimLoader.getHadoopShims().getUGIForConf(startSs.conf);
+      FileSystem.get(startSs.conf);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    
     try {
       startSs.authenticator = HiveUtils.getAuthenticator(
           startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
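
The SessionState change above front-loads three slow pieces of session setup (the metastore
client, the UGI, and the default FileSystem) so their cost is paid once at session start rather
than during the first query. A minimal standalone sketch of the FileSystem part, assuming only
hadoop-common on the classpath; it demonstrates that FileSystem.get(conf) constructs the
FileSystem on first use and serves later calls from a cache, which is why warming it up once
at startup helps:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FsWarmupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        long t0 = System.nanoTime();
        FileSystem.get(conf);   // first call: loads config, constructs and caches the FileSystem
        long t1 = System.nanoTime();
        FileSystem.get(conf);   // later calls: returned from FileSystem's internal cache
        long t2 = System.nanoTime();

        System.out.printf("first get: %d us, cached get: %d us%n",
            (t1 - t0) / 1000, (t2 - t1) / 1000);
      }
    }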

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +33,9 @@ import org.apache.hadoop.hive.serde2.io.
     + "NULL otherwise", extended = "Example:\n"
     + "  > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + "  0\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  NULL")
+@VectorizedExpressions({FuncACosLongToDouble.class, FuncACosDoubleToDouble.class})
 public class UDFAcos extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFAcos() {
   }
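
The UDF changes below all follow the same two-part pattern: add a @VectorizedExpressions
annotation listing the generated vectorized implementations the planner may substitute for the
row-mode UDF, and mark the reused result Writable as final. A sketch of the same pattern applied
to a hypothetical user-defined cosine UDF; UDFMyCos and the function name "my_cos" are invented
for illustration, while the annotation and the two FuncCos* classes are the real ones declared
by the UDFCos change further down:

    import org.apache.hadoop.hive.ql.exec.Description;
    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosDoubleToDouble;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosLongToDouble;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;

    // Hypothetical UDF shown only to illustrate the annotation pattern used in the
    // changes below; it reuses the generated cosine expressions that the UDFCos
    // change in this commit also declares.
    @Description(name = "my_cos", value = "_FUNC_(x) - returns the cosine of x (x is in radians)")
    @VectorizedExpressions({FuncCosDoubleToDouble.class, FuncCosLongToDouble.class})
    public class UDFMyCos extends UDF {
      // The result object is reused across rows, so it can (and should) be final.
      private final DoubleWritable result = new DoubleWritable();

      public DoubleWritable evaluate(DoubleWritable x) {
        if (x == null) {
          return null;
        }
        result.set(Math.cos(x.get()));
        return result;
      }
    }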

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -32,8 +35,9 @@ import org.apache.hadoop.hive.serde2.io.
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n"
     + "  0\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  NULL")
+@VectorizedExpressions({FuncASinLongToDouble.class, FuncASinDoubleToDouble.class})
 public class UDFAsin extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFAsin() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java Wed Oct 23 20:50:38 2013
@@ -22,6 +22,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncATanDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncATanLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 @Description(
@@ -31,6 +34,7 @@ import org.apache.hadoop.hive.serde2.io.
         "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
         "  0"
     )
+@VectorizedExpressions({FuncATanLongToDouble.class, FuncATanDoubleToDouble.class})
 public class UDFAtan extends UDF {
 
   @SuppressWarnings("unused")

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBin.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncBin;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
@@ -31,9 +33,10 @@ import org.apache.hadoop.io.Text;
     value = "_FUNC_(n) - returns n in binary",
     extended = "n is a BIGINT. Returns NULL if n is NULL.\n"
     + "Example:\n" + "  > SELECT _FUNC_(13) FROM src LIMIT 1\n" + "  '1101'")
+@VectorizedExpressions({FuncBin.class})
 public class UDFBin extends UDF {
   private final Text result = new Text();
-  private byte[] value = new byte[64];
+  private final byte[] value = new byte[64];
 
   public Text evaluate(LongWritable n) {
     if (n == null) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCeilDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCeilLongToLong;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -35,6 +38,7 @@ import org.apache.hadoop.io.LongWritable
     + "  > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
     + "  0\n"
     + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
+@VectorizedExpressions({FuncCeilLongToLong.class, FuncCeilDoubleToLong.class})
 public class UDFCeil extends UDF {
   private final LongWritable longWritable = new LongWritable();
   private final HiveDecimalWritable decimalWritable = new HiveDecimalWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +33,9 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - returns the cosine of x (x is in radians)",
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  1")
+@VectorizedExpressions({FuncCosDoubleToDouble.class, FuncCosLongToDouble.class})
 public class UDFCos extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFCos() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java Wed Oct 23 20:50:38 2013
@@ -25,6 +25,8 @@ import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthLong;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -40,11 +42,12 @@ import org.apache.hadoop.io.Text;
     + "'yyyy-MM-dd'.\n"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" + "  30")
+@VectorizedExpressions({VectorUDFDayOfMonthLong.class})
 public class UDFDayOfMonth extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFDayOfMonth() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDegrees.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDegrees.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDegrees.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDegrees.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncDegreesDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncDegreesLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 @Description(
@@ -30,6 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
         "  > SELECT _FUNC_(30) FROM src LIMIT 1;\n" +
         "  -1\n"
     )
+@VectorizedExpressions({FuncDegreesLongToDouble.class, FuncDegreesDoubleToDouble.class})
 public class UDFDegrees extends UDF{
 
   @SuppressWarnings("unused")

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncExpDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncExpLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +33,9 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - Returns e to the power of x",
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  1")
+@VectorizedExpressions({FuncExpDoubleToDouble.class, FuncExpLongToDouble.class})
 public class UDFExp extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFExp() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncFloorDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncFloorLongToLong;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -35,6 +38,7 @@ import org.apache.hadoop.io.LongWritable
     + "  > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
     + "  -1\n"
     + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
+@VectorizedExpressions({FuncFloorLongToLong.class, FuncFloorDoubleToLong.class})
 public class UDFFloor extends UDF {
   private final LongWritable result = new LongWritable();
   private final HiveDecimalWritable bdResult = new HiveDecimalWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHex.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncHex;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringUnaryUDF.IUDFUnaryString;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -40,6 +42,7 @@ import org.apache.hadoop.io.Text;
     + "  'H1'\n"
     + "  > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n"
     + "  '46616365626F6F6B'")
+@VectorizedExpressions({FuncHex.class})
 public class UDFHex extends UDF implements IUDFUnaryString {
   private final Text result = new Text();
   private byte[] value = new byte[16];

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java Wed Oct 23 20:50:38 2013
@@ -25,6 +25,8 @@ import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourLong;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -41,12 +43,13 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  12\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  12")
+@VectorizedExpressions({VectorUDFHourLong.class})
 public class UDFHour extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFHour() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLTrim;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -31,8 +33,9 @@ import org.apache.hadoop.io.Text;
     value = "_FUNC_(str) - Removes the leading space characters from str ",
     extended = "Example:\n"
     + "  > SELECT _FUNC_('   facebook') FROM src LIMIT 1;\n" + "  'facebook'")
+@VectorizedExpressions({StringLTrim.class})
 public class UDFLTrim extends UDF {
-  private Text result = new Text();
+  private final Text result = new Text();
 
   public UDFLTrim() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLength.java Wed Oct 23 20:50:38 2013
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLength;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -32,6 +34,7 @@ import org.apache.hadoop.io.Text;
     value = "_FUNC_(str | binary) - Returns the length of str or number of bytes in binary data",
     extended = "Example:\n"
     + "  > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + "  8")
+@VectorizedExpressions({StringLength.class})
 public class UDFLength extends UDF {
   private final IntWritable result = new IntWritable();
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java Wed Oct 23 20:50:38 2013
@@ -23,6 +23,8 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterStringColLikeStringScalar;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.Text;
 
@@ -35,6 +37,7 @@ import org.apache.hadoop.io.Text;
     extended = "Example:\n"
     + "  > SELECT a.* FROM srcpart a WHERE a.hr _FUNC_ '%2' LIMIT 1;\n"
     + "  27      val_27  2008-04-08      12")
+@VectorizedExpressions({FilterStringColLikeStringScalar.class})
 public class UDFLike extends UDF {
   private final Text lastLikePattern = new Text();
   private Pattern p = null;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +33,9 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - Returns the natural logarithm of x",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + "  0")
+@VectorizedExpressions({FuncLnLongToDouble.class, FuncLnDoubleToDouble.class})
 public class UDFLn extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFLn() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,11 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +35,10 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_([b], x) - Returns the logarithm of x with base b",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(13, 13) FROM src LIMIT 1;\n" + "  1")
+@VectorizedExpressions({FuncLogWithBaseLongToDouble.class, FuncLogWithBaseDoubleToDouble.class,
+  FuncLnLongToDouble.class, FuncLnDoubleToDouble.class})
 public class UDFLog extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFLog() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10DoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10LongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,10 +33,11 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - Returns the logarithm of x with base 10",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(10) FROM src LIMIT 1;\n" + "  1")
+@VectorizedExpressions({FuncLog10LongToDouble.class, FuncLog10DoubleToDouble.class})
 public class UDFLog10 extends UDF {
   private static double log10 = Math.log(10.0);
 
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFLog10() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2DoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2LongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,10 +33,11 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - Returns the logarithm of x with base 2",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  1")
+@VectorizedExpressions({FuncLog2LongToDouble.class, FuncLog2DoubleToDouble.class})
 public class UDFLog2 extends UDF {
   private static double log2 = Math.log(2.0);
 
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFLog2() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java Wed Oct 23 20:50:38 2013
@@ -25,6 +25,8 @@ import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteLong;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -41,12 +43,13 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  58\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  58")
+@VectorizedExpressions({VectorUDFMinuteLong.class})
 public class UDFMinute extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFMinute() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,19 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarDivideDoubleColumn;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
@@ -30,6 +43,12 @@ import org.apache.hadoop.hive.serde2.io.
  */
 @Description(name = "/", value = "a _FUNC_ b - Divide a by b", extended = "Example:\n"
     + "  > SELECT 3 _FUNC_ 2 FROM src LIMIT 1;\n" + "  1.5")
+@VectorizedExpressions({LongColDivideLongColumn.class, LongColDivideDoubleColumn.class,
+  DoubleColDivideLongColumn.class, DoubleColDivideDoubleColumn.class,
+  LongColDivideLongScalar.class, LongColDivideDoubleScalar.class,
+  DoubleColDivideLongScalar.class, DoubleColDivideDoubleScalar.class,
+  LongScalarDivideLongColumn.class, LongScalarDivideDoubleColumn.class,
+  DoubleScalarDivideLongColumn.class, DoubleScalarDivideDoubleColumn.class})
 /**
  * Note that in SQL, the return type of divide is not necessarily the same
  * as the parameters. For example, 3 / 2 = 1.5, not 1. To follow SQL, we always

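For binary arithmetic operators such as '/' above and '-' below, the annotation lists twelve
generated classes. The count follows from the naming scheme of the imports (an inference from
the class names, not something the commit states):

    // Operand shapes: ColCol, ColScalar, ScalarCol                -> 3 shapes
    //   (a Scalar-op-Scalar expression is just a constant, so no class is needed for it)
    // Operand types:  {Long, Double} left x {Long, Double} right  -> 4 type combinations
    // Total: 3 x 4 = 12 classes per operator, e.g. for divide:
    //   LongColDivideLongColumn, LongColDivideDoubleScalar, DoubleScalarDivideLongColumn, ...
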
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,19 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractLongColumn;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -33,6 +46,12 @@ import org.apache.hadoop.io.LongWritable
  *
  */
 @Description(name = "-", value = "a _FUNC_ b - Returns the difference a-b")
+@VectorizedExpressions({LongColSubtractLongColumn.class, LongColSubtractDoubleColumn.class,
+  DoubleColSubtractLongColumn.class, DoubleColSubtractDoubleColumn.class,
+  LongColSubtractLongScalar.class, LongColSubtractDoubleScalar.class,
+  DoubleColSubtractLongScalar.class, DoubleColSubtractDoubleScalar.class,
+  LongScalarSubtractLongColumn.class, LongScalarSubtractDoubleColumn.class,
+  DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class})
 public class UDFOPMinus extends UDFBaseNumericOp {
 
   public UDFOPMinus() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,19 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloLongColumn;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -33,6 +46,12 @@ import org.apache.hadoop.io.LongWritable
  *
  */
 @Description(name = "%", value = "a _FUNC_ b - Returns the remainder when dividing a by b")
+@VectorizedExpressions({LongColModuloLongColumn.class, LongColModuloDoubleColumn.class,
+  DoubleColModuloLongColumn.class, DoubleColModuloDoubleColumn.class,
+  LongColModuloLongScalar.class, LongColModuloDoubleScalar.class,
+  DoubleColModuloLongScalar.class, DoubleColModuloDoubleScalar.class,
+  LongScalarModuloLongColumn.class, LongScalarModuloDoubleColumn.class,
+  DoubleScalarModuloLongColumn.class, DoubleScalarModuloDoubleColumn.class})
 public class UDFOPMod extends UDFBaseNumericOp {
 
   public UDFOPMod() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,19 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyLongColumn;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -33,6 +46,12 @@ import org.apache.hadoop.io.LongWritable
  *
  */
 @Description(name = "*", value = "a _FUNC_ b - Multiplies a by b")
+@VectorizedExpressions({LongColMultiplyLongColumn.class, LongColMultiplyDoubleColumn.class,
+    DoubleColMultiplyLongColumn.class, DoubleColMultiplyDoubleColumn.class,
+    LongColMultiplyLongScalar.class, LongColMultiplyDoubleScalar.class,
+    DoubleColMultiplyLongScalar.class, DoubleColMultiplyDoubleScalar.class,
+    LongScalarMultiplyLongColumn.class, LongScalarMultiplyDoubleColumn.class,
+    DoubleScalarMultiplyLongColumn.class, DoubleScalarMultiplyDoubleColumn.class})
 public class UDFOPMultiply extends UDFBaseNumericOp {
 
   public UDFOPMultiply() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java Wed Oct 23 20:50:38 2013
@@ -19,9 +19,12 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -32,6 +35,7 @@ import org.apache.hadoop.io.LongWritable
  *
  */
 @Description(name = "-", value = "_FUNC_ a - Returns -a")
+@VectorizedExpressions({LongColUnaryMinus.class, DoubleColUnaryMinus.class})
 public class UDFOPNegative extends UDFBaseNumericUnaryOp {
 
   public UDFOPNegative() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,19 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddLongColumn;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -38,6 +51,11 @@ import org.apache.hadoop.io.LongWritable
  * UDFRegistry.implicitConvertable method.
  */
 @Description(name = "+", value = "a _FUNC_ b - Returns a+b")
+@VectorizedExpressions({LongColAddLongColumn.class, LongColAddDoubleColumn.class,
+  DoubleColAddLongColumn.class, DoubleColAddDoubleColumn.class, LongColAddLongScalar.class,
+  LongColAddDoubleScalar.class, DoubleColAddLongScalar.class, DoubleColAddDoubleScalar.class,
+  LongScalarAddLongColumn.class, LongScalarAddDoubleColumn.class, DoubleScalarAddLongColumn.class,
+  DoubleScalarAddDoubleColumn.class})
 public class UDFOPPlus extends UDFBaseNumericOp {
 
   public UDFOPPlus() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.PosModDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.PosModLongToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -34,6 +37,7 @@ import org.apache.hadoop.io.LongWritable
  * {org.apache.hadoop.hive.ql.exec.FunctionRegistry}
  */
 @Description(name = "pmod", value = "a _FUNC_ b - Compute the positive modulo")
+@VectorizedExpressions({PosModLongToLong.class, PosModDoubleToDouble.class})
 public class UDFPosMod extends UDFBaseNumericOp {
 
   public UDFPosMod() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -33,6 +36,7 @@ import org.apache.hadoop.io.IntWritable;
     value = "_FUNC_(x1, x2) - raise x1 to the power of x2",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(2, 3) FROM src LIMIT 1;\n" + "  8")
+@VectorizedExpressions({FuncPowerLongToDouble.class, FuncPowerDoubleToDouble.class})
 public class UDFPower extends UDF {
   private final DoubleWritable resultDouble = new DoubleWritable();
   private final HiveDecimalWritable resultHiveDecimal = new HiveDecimalWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringRTrim;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -31,6 +33,7 @@ import org.apache.hadoop.io.Text;
     value = "_FUNC_(str) - Removes the trailing space characters from str ",
     extended = "Example:\n"
     + "  > SELECT _FUNC_('facebook   ') FROM src LIMIT 1;\n" + "  'facebook'")
+@VectorizedExpressions({StringRTrim.class})
 public class UDFRTrim extends UDF {
 
   Text result = new Text();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRadiansDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRadiansLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 
@@ -31,12 +34,13 @@ import org.apache.hadoop.hive.serde2.io.
           "  > SELECT _FUNC_(90) FROM src LIMIT 1;\n" +
           "  1.5707963267949mo\n"
       )
+@VectorizedExpressions({FuncRadiansLongToDouble.class, FuncRadiansDoubleToDouble.class})
 public class UDFRadians extends UDF {
 
   @SuppressWarnings("unused")
   private static Log LOG = LogFactory.getLog(UDFRadians.class.getName());
   DoubleWritable result = new DoubleWritable();
-  
+
   public UDFRadians() {
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java Wed Oct 23 20:50:38 2013
@@ -22,6 +22,9 @@ import java.util.Random;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncRandNoSeed;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncRand;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.io.LongWritable;
 
@@ -32,10 +35,11 @@ import org.apache.hadoop.io.LongWritable
 @Description(name = "rand",
     value = "_FUNC_([seed]) - Returns a pseudorandom number between 0 and 1")
 @UDFType(deterministic = false)
+@VectorizedExpressions({FuncRandNoSeed.class, FuncRand.class})
 public class UDFRand extends UDF {
   private Random random;
 
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFRand() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java Wed Oct 23 20:50:38 2013
@@ -25,6 +25,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterStringColRegExpStringScalar;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.Text;
 
@@ -36,6 +38,7 @@ import org.apache.hadoop.io.Text;
     value = "str _FUNC_ regexp - Returns true if str matches regexp and "
     + "false otherwise", extended = "Example:\n"
     + "  > SELECT 'fb' _FUNC_ '.*' FROM src LIMIT 1;\n" + "  true")
+@VectorizedExpressions({FilterStringColRegExpStringScalar.class})
 public class UDFRegExp extends UDF {
   static final Log LOG = LogFactory.getLog(UDFRegExp.class.getName());
 
@@ -43,7 +46,7 @@ public class UDFRegExp extends UDF {
   private Pattern p = null;
   private boolean warned = false;
 
-  private BooleanWritable result = new BooleanWritable();
+  private final BooleanWritable result = new BooleanWritable();
 
   public UDFRegExp() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java Wed Oct 23 20:50:38 2013
@@ -24,6 +24,9 @@ import java.math.RoundingMode;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.RoundWithNumDigitsDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -39,6 +42,7 @@ import org.apache.hadoop.io.LongWritable
     value = "_FUNC_(x[, d]) - round x to d decimal places",
     extended = "Example:\n"
     + "  > SELECT _FUNC_(12.3456, 1) FROM src LIMIT 1;\n" + "  12.3'")
+@VectorizedExpressions({FuncRoundDoubleToDouble.class, RoundWithNumDigitsDoubleToDouble.class})
 public class UDFRound extends UDF {
   private final HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
   private final DoubleWritable doubleWritable = new DoubleWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java Wed Oct 23 20:50:38 2013
@@ -25,6 +25,8 @@ import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondLong;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -41,13 +43,14 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  59\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  59")
+@VectorizedExpressions({VectorUDFSecondLong.class})
 public class UDFSecond extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat(
       "yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFSecond() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSign.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSign.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSign.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSign.java Wed Oct 23 20:50:38 2013
@@ -22,6 +22,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSignDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSignLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 @Description(
@@ -31,6 +34,7 @@ import org.apache.hadoop.hive.serde2.io.
     		"  > SELECT _FUNC_(40) FROM src LIMIT 1;\n" +
     		"  1"
     )
+@VectorizedExpressions({FuncSignLongToDouble.class, FuncSignDoubleToDouble.class})
 public class UDFSign extends UDF {
 
   @SuppressWarnings("unused")

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -30,8 +33,9 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - returns the sine of x (x is in radians)",
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  0")
+@VectorizedExpressions({FuncSinLongToDouble.class, FuncSinDoubleToDouble.class})
 public class UDFSin extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFSin() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java Wed Oct 23 20:50:38 2013
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 /**
@@ -29,8 +32,9 @@ import org.apache.hadoop.hive.serde2.io.
     value = "_FUNC_(x) - returns the square root of x",
     extended = "Example:\n "
     + "  > SELECT _FUNC_(4) FROM src LIMIT 1;\n" + "  2")
+@VectorizedExpressions({FuncSqrtLongToDouble.class, FuncSqrtDoubleToDouble.class})
 public class UDFSqrt extends UDF {
-  private DoubleWritable result = new DoubleWritable();
+  private final DoubleWritable result = new DoubleWritable();
 
   public UDFSqrt() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java Wed Oct 23 20:50:38 2013
@@ -22,6 +22,9 @@ import java.util.Arrays;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringSubstrColStart;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringSubstrColStartLen;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -44,6 +47,7 @@ import org.apache.hadoop.io.Text;
     + "  'ebook'\n"
     + "  > SELECT _FUNC_('Facebook', 5, 1) FROM src LIMIT 1;\n"
     + "  'b'")
+@VectorizedExpressions({StringSubstrColStart.class, StringSubstrColStartLen.class})
 public class UDFSubstr extends UDF {
 
   private final int[] index;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java Wed Oct 23 20:50:38 2013
@@ -22,6 +22,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
 @Description(
@@ -31,6 +34,7 @@ import org.apache.hadoop.hive.serde2.io.
     		"  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" +
     		"  1"
     )
+@VectorizedExpressions({FuncTanLongToDouble.class, FuncTanDoubleToDouble.class})
 public class UDFTan extends UDF {
 
   @SuppressWarnings("unused")

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Wed Oct 23 20:50:38 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -38,6 +41,8 @@ import org.apache.hadoop.io.Text;
  * UDFToBoolean.
  *
  */
+@VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
+  CastDoubleToBooleanViaDoubleToLong.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();
 

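The annotation is purely declarative; the planner has to read it back to decide which VectorExpression can replace a given UDF call. That lookup lives in the vectorization code elsewhere in this commit rather than in these hunks, so the snippet below is only an illustration of the kind of reflective query the annotation enables. It assumes, as the usage above suggests, that VectorizedExpressions is retained at runtime and that its value() element is the array of expression classes; it is not the actual Hive lookup code.

  import java.util.Arrays;
  import java.util.Collections;
  import java.util.List;

  import org.apache.hadoop.hive.ql.exec.UDF;
  import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
  import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;

  // Illustration only: given a UDF class, list the vectorized implementations it advertises.
  public final class VectorizedExpressionLookup {
    private VectorizedExpressionLookup() {
    }

    public static List<Class<? extends VectorExpression>> candidatesFor(Class<? extends UDF> udfClass) {
      VectorizedExpressions ann = udfClass.getAnnotation(VectorizedExpressions.class);
      if (ann == null) {
        // No vectorized counterpart advertised; the caller would fall back to row-mode execution.
        return Collections.emptyList();
      }
      return Arrays.asList(ann.value());
    }
  }

For example, candidatesFor(UDFOPPlus.class) would return the twelve Add expressions listed in the UDFOPPlus hunk above, and candidatesFor(UDFToBoolean.class) the two cast expressions added here.
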
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java?rev=1535174&r1=1535173&r2=1535174&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java Wed Oct 23 20:50:38 2013
@@ -19,6 +19,9 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -36,6 +39,7 @@ import org.apache.hadoop.io.Text;
  * UDFToByte.
  *
  */
+@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class})
 public class UDFToByte extends UDF {
   private final ByteWritable byteWritable = new ByteWritable();