Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/23 22:00:11 UTC

svn commit: r1627140 [3/3] - in /hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/ contrib/src/java/org/apache/hadoop/h...

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java Tue Sep 23 20:00:10 2014
@@ -54,6 +54,19 @@ public class TypeCheckCtx implements Nod
 
   private boolean allowDistinctFunctions;
 
+  private final boolean allowGBExprElimination;
+
+  private final boolean allowAllColRef;
+
+  private final boolean allowFunctionStar;
+
+  private final boolean allowWindowing;
+
+  // "[]" : LSQUARE/INDEX Expression
+  private final boolean allowIndexExpr;
+
+  private final boolean allowSubQueryExpr;
+
   /**
    * Constructor.
    *
@@ -61,10 +74,23 @@ public class TypeCheckCtx implements Nod
    *          The input row resolver of the previous operator.
    */
   public TypeCheckCtx(RowResolver inputRR) {
+    this(inputRR, false, true, true, true, true, true, true, true);
+  }
+
+  public TypeCheckCtx(RowResolver inputRR, boolean allowStatefulFunctions,
+      boolean allowDistinctFunctions, boolean allowGBExprElimination, boolean allowAllColRef,
+      boolean allowFunctionStar, boolean allowWindowing,
+      boolean allowIndexExpr, boolean allowSubQueryExpr) {
     setInputRR(inputRR);
     error = null;
-    allowStatefulFunctions = false;
-    allowDistinctFunctions = true;
+    this.allowStatefulFunctions = allowStatefulFunctions;
+    this.allowDistinctFunctions = allowDistinctFunctions;
+    this.allowGBExprElimination = allowGBExprElimination;
+    this.allowAllColRef = allowAllColRef;
+    this.allowFunctionStar = allowFunctionStar;
+    this.allowWindowing = allowWindowing;
+    this.allowIndexExpr = allowIndexExpr;
+    this.allowSubQueryExpr = allowSubQueryExpr;
   }
 
   /**
@@ -98,7 +124,8 @@ public class TypeCheckCtx implements Nod
   }
 
   /**
-   * @param allowStatefulFunctions whether to allow stateful UDF invocations
+   * @param allowStatefulFunctions
+   *          whether to allow stateful UDF invocations
    */
   public void setAllowStatefulFunctions(boolean allowStatefulFunctions) {
     this.allowStatefulFunctions = allowStatefulFunctions;
@@ -136,7 +163,31 @@ public class TypeCheckCtx implements Nod
     this.allowDistinctFunctions = allowDistinctFunctions;
   }
 
-  public boolean isAllowDistinctFunctions() {
+  public boolean getAllowDistinctFunctions() {
     return allowDistinctFunctions;
   }
+
+  public boolean getAllowGBExprElimination() {
+    return allowGBExprElimination;
+  }
+
+  public boolean getallowAllColRef() {
+    return allowAllColRef;
+  }
+
+  public boolean getallowFunctionStar() {
+    return allowFunctionStar;
+  }
+
+  public boolean getallowWindowing() {
+    return allowWindowing;
+  }
+
+  public boolean getallowIndexExpr() {
+    return allowIndexExpr;
+  }
+
+  public boolean getallowSubQueryExpr() {
+    return allowSubQueryExpr;
+  }
 }
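
The nine-argument constructor above lets callers switch individual expression features on or off, while the original one-argument constructor keeps the old behaviour by delegating with permissive defaults. As a purely illustrative sketch (the helper class and method below are made up; only the TypeCheckCtx constructor and its flag order come from this patch), a caller that wants to reject all-column references, function(*), windowing, "[]" index expressions and subquery expressions might build the context like this:

import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;

public class RestrictedTypeCheckCtxExample {
  // Hypothetical helper, not part of the patch.
  public static TypeCheckCtx restrictedCtx(RowResolver inputRR) {
    return new TypeCheckCtx(inputRR,
        false,  // allowStatefulFunctions
        true,   // allowDistinctFunctions
        true,   // allowGBExprElimination
        false,  // allowAllColRef
        false,  // allowFunctionStar
        false,  // allowWindowing
        false,  // allowIndexExpr
        false); // allowSubQueryExpr
  }
}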

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Tue Sep 23 20:00:10 2014
@@ -80,12 +80,12 @@ import org.apache.hadoop.hive.serde2.typ
  * expression Node Descriptor trees. They also introduce the correct conversion
  * functions to do proper implicit conversion.
  */
-public final class TypeCheckProcFactory {
+public class TypeCheckProcFactory {
 
   protected static final Log LOG = LogFactory.getLog(TypeCheckProcFactory.class
       .getName());
 
-  private TypeCheckProcFactory() {
+  protected TypeCheckProcFactory() {
     // prevent instantiation
   }
 
@@ -118,7 +118,7 @@ public final class TypeCheckProcFactory 
     RowResolver input = ctx.getInputRR();
     ExprNodeDesc desc = null;
 
-    if ((ctx == null) || (input == null)) {
+    if ((ctx == null) || (input == null) || (!ctx.getAllowGBExprElimination())) {
       return null;
     }
 
@@ -137,8 +137,13 @@ public final class TypeCheckProcFactory 
     return desc;
   }
 
-  public static Map<ASTNode, ExprNodeDesc> genExprNode(ASTNode expr,
-      TypeCheckCtx tcCtx) throws SemanticException {
+  public static Map<ASTNode, ExprNodeDesc> genExprNode(ASTNode expr, TypeCheckCtx tcCtx)
+      throws SemanticException {
+    return genExprNode(expr, tcCtx, new TypeCheckProcFactory());
+  }
+
+  protected static Map<ASTNode, ExprNodeDesc> genExprNode(ASTNode expr,
+      TypeCheckCtx tcCtx, TypeCheckProcFactory tf) throws SemanticException {
     // Create the walker, the rules dispatcher and the context.
     // create a walker which walks the tree in a DFS manner while maintaining
     // the operator stack. The dispatcher
@@ -146,13 +151,13 @@ public final class TypeCheckProcFactory 
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
 
     opRules.put(new RuleRegExp("R1", HiveParser.TOK_NULL + "%"),
-        getNullExprProcessor());
+        tf.getNullExprProcessor());
     opRules.put(new RuleRegExp("R2", HiveParser.Number + "%|" +
         HiveParser.TinyintLiteral + "%|" +
         HiveParser.SmallintLiteral + "%|" +
         HiveParser.BigintLiteral + "%|" +
         HiveParser.DecimalLiteral + "%"),
-        getNumExprProcessor());
+        tf.getNumExprProcessor());
     opRules
         .put(new RuleRegExp("R3", HiveParser.Identifier + "%|"
         + HiveParser.StringLiteral + "%|" + HiveParser.TOK_CHARSETLITERAL + "%|"
@@ -162,18 +167,18 @@ public final class TypeCheckProcFactory 
         + HiveParser.KW_ARRAY + "%|" + HiveParser.KW_MAP + "%|"
         + HiveParser.KW_STRUCT + "%|" + HiveParser.KW_EXISTS + "%|"
         + HiveParser.TOK_SUBQUERY_OP_NOTIN + "%"),
-        getStrExprProcessor());
+        tf.getStrExprProcessor());
     opRules.put(new RuleRegExp("R4", HiveParser.KW_TRUE + "%|"
-        + HiveParser.KW_FALSE + "%"), getBoolExprProcessor());
-    opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%"), getDateExprProcessor());
+        + HiveParser.KW_FALSE + "%"), tf.getBoolExprProcessor());
+    opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%"), tf.getDateExprProcessor());
     opRules.put(new RuleRegExp("R6", HiveParser.TOK_TABLE_OR_COL + "%"),
-        getColumnExprProcessor());
+        tf.getColumnExprProcessor());
     opRules.put(new RuleRegExp("R7", HiveParser.TOK_SUBQUERY_OP + "%"),
-        getSubQueryExprProcessor());
+        tf.getSubQueryExprProcessor());
 
     // The dispatcher fires the processor corresponding to the closest matching
     // rule and passes the context along
-    Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(),
+    Dispatcher disp = new DefaultRuleDispatcher(tf.getDefaultExprProcessor(),
         opRules, tcCtx);
     GraphWalker ogw = new DefaultGraphWalker(disp);
 
@@ -229,7 +234,7 @@ public final class TypeCheckProcFactory 
    *
    * @return NullExprProcessor.
    */
-  public static NullExprProcessor getNullExprProcessor() {
+  public NullExprProcessor getNullExprProcessor() {
     return new NullExprProcessor();
   }
 
@@ -304,7 +309,7 @@ public final class TypeCheckProcFactory 
    *
    * @return NumExprProcessor.
    */
-  public static NumExprProcessor getNumExprProcessor() {
+  public NumExprProcessor getNumExprProcessor() {
     return new NumExprProcessor();
   }
 
@@ -362,7 +367,7 @@ public final class TypeCheckProcFactory 
    *
    * @return StrExprProcessor.
    */
-  public static StrExprProcessor getStrExprProcessor() {
+  public StrExprProcessor getStrExprProcessor() {
     return new StrExprProcessor();
   }
 
@@ -408,7 +413,7 @@ public final class TypeCheckProcFactory 
    *
    * @return BoolExprProcessor.
    */
-  public static BoolExprProcessor getBoolExprProcessor() {
+  public BoolExprProcessor getBoolExprProcessor() {
     return new BoolExprProcessor();
   }
 
@@ -449,7 +454,7 @@ public final class TypeCheckProcFactory 
    *
    * @return DateExprProcessor.
    */
-  public static DateExprProcessor getDateExprProcessor() {
+  public DateExprProcessor getDateExprProcessor() {
     return new DateExprProcessor();
   }
 
@@ -546,7 +551,7 @@ public final class TypeCheckProcFactory 
    *
    * @return ColumnExprProcessor.
    */
-  public static ColumnExprProcessor getColumnExprProcessor() {
+  public ColumnExprProcessor getColumnExprProcessor() {
     return new ColumnExprProcessor();
   }
 
@@ -613,7 +618,7 @@ public final class TypeCheckProcFactory 
       windowingTokens.add(HiveParser.TOK_TABSORTCOLNAMEDESC);
     }
 
-    private static boolean isRedundantConversionFunction(ASTNode expr,
+    protected static boolean isRedundantConversionFunction(ASTNode expr,
         boolean isFunction, ArrayList<ExprNodeDesc> children) {
       if (!isFunction) {
         return false;
@@ -700,7 +705,30 @@ public final class TypeCheckProcFactory 
       return getFuncExprNodeDescWithUdfData(udfName, null, children);
     }
 
-    static ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
+    protected void validateUDF(ASTNode expr, boolean isFunction, TypeCheckCtx ctx, FunctionInfo fi,
+        List<ExprNodeDesc> children, GenericUDF genericUDF) throws SemanticException {
+      // Detect UDTF's in nested SELECT, GROUP BY, etc as they aren't
+      // supported
+      if (fi.getGenericUDTF() != null) {
+        throw new SemanticException(ErrorMsg.UDTF_INVALID_LOCATION.getMsg());
+      }
+      // UDAF in filter condition, group-by caluse, param of funtion, etc.
+      if (fi.getGenericUDAFResolver() != null) {
+        if (isFunction) {
+          throw new SemanticException(ErrorMsg.UDAF_INVALID_LOCATION.getMsg((ASTNode) expr
+              .getChild(0)));
+        } else {
+          throw new SemanticException(ErrorMsg.UDAF_INVALID_LOCATION.getMsg(expr));
+        }
+      }
+      if (!ctx.getAllowStatefulFunctions() && (genericUDF != null)) {
+        if (FunctionRegistry.isStateful(genericUDF)) {
+          throw new SemanticException(ErrorMsg.UDF_STATEFUL_INVALID_LOCATION.getMsg());
+        }
+      }
+    }
+
+    protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
         boolean isFunction, ArrayList<ExprNodeDesc> children, TypeCheckCtx ctx)
         throws SemanticException, UDFArgumentException {
       // return the child directly if the conversion is redundant.
@@ -713,6 +741,7 @@ public final class TypeCheckProcFactory 
       ExprNodeDesc desc;
       if (funcText.equals(".")) {
         // "." : FIELD Expression
+
         assert (children.size() == 2);
         // Only allow constant field name for now
         assert (children.get(1) instanceof ExprNodeConstantDesc);
@@ -727,23 +756,22 @@ public final class TypeCheckProcFactory 
         // Allow accessing a field of list element structs directly from a list
         boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
         if (isList) {
-          objectTypeInfo = ((ListTypeInfo) objectTypeInfo)
-              .getListElementTypeInfo();
+          objectTypeInfo = ((ListTypeInfo) objectTypeInfo).getListElementTypeInfo();
         }
         if (objectTypeInfo.getCategory() != Category.STRUCT) {
           throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr));
         }
-        TypeInfo t = ((StructTypeInfo) objectTypeInfo)
-            .getStructFieldTypeInfo(fieldNameString);
+        TypeInfo t = ((StructTypeInfo) objectTypeInfo).getStructFieldTypeInfo(fieldNameString);
         if (isList) {
           t = TypeInfoFactory.getListTypeInfo(t);
         }
 
-        desc = new ExprNodeFieldDesc(t, children.get(0), fieldNameString,
-            isList);
-
+        desc = new ExprNodeFieldDesc(t, children.get(0), fieldNameString, isList);
       } else if (funcText.equals("[")) {
         // "[]" : LSQUARE/INDEX Expression
+        if (!ctx.getallowIndexExpr())
+          throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(expr));
+
         assert (children.size() == 2);
 
         // Check whether this is a list or a map
@@ -759,8 +787,7 @@ public final class TypeCheckProcFactory 
 
           // Calculate TypeInfo
           TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
-          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
-              .getGenericUDFForIndex(), children);
+          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry.getGenericUDFForIndex(), children);
         } else if (myt.getCategory() == Category.MAP) {
           if (!FunctionRegistry.implicitConvertible(children.get(1).getTypeInfo(),
               ((MapTypeInfo) myt).getMapKeyTypeInfo())) {
@@ -769,11 +796,9 @@ public final class TypeCheckProcFactory 
           }
           // Calculate TypeInfo
           TypeInfo t = ((MapTypeInfo) myt).getMapValueTypeInfo();
-          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
-              .getGenericUDFForIndex(), children);
+          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry.getGenericUDFForIndex(), children);
         } else {
-          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
-              myt.getTypeName()));
+          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr, myt.getTypeName()));
         }
       } else {
         // other operators or functions
@@ -825,26 +850,7 @@ public final class TypeCheckProcFactory 
           }
         }
 
-        // Detect UDTF's in nested SELECT, GROUP BY, etc as they aren't
-        // supported
-        if (fi.getGenericUDTF() != null) {
-          throw new SemanticException(ErrorMsg.UDTF_INVALID_LOCATION.getMsg());
-        }
-        // UDAF in filter condition, group-by caluse, param of funtion, etc.
-        if (fi.getGenericUDAFResolver() != null) {
-          if (isFunction) {
-            throw new SemanticException(ErrorMsg.UDAF_INVALID_LOCATION.
-                getMsg((ASTNode) expr.getChild(0)));
-          } else {
-            throw new SemanticException(ErrorMsg.UDAF_INVALID_LOCATION.getMsg(expr));
-          }
-        }
-        if (!ctx.getAllowStatefulFunctions() && (genericUDF != null)) {
-          if (FunctionRegistry.isStateful(genericUDF)) {
-            throw new SemanticException(
-              ErrorMsg.UDF_STATEFUL_INVALID_LOCATION.getMsg());
-          }
-        }
+        validateUDF(expr, isFunction, ctx, fi, children, genericUDF);
 
         // Try to infer the type of the constant only if there are two
         // nodes, one of them is column and the other is numeric const
@@ -955,6 +961,24 @@ public final class TypeCheckProcFactory 
       return false;
     }
 
+    protected ExprNodeColumnDesc processQualifiedColRef(TypeCheckCtx ctx, ASTNode expr,
+        Object... nodeOutputs) throws SemanticException {
+      RowResolver input = ctx.getInputRR();
+      String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getChild(0)
+          .getText());
+      // NOTE: tableAlias must be a valid non-ambiguous table alias,
+      // because we've checked that in TOK_TABLE_OR_COL's process method.
+      ColumnInfo colInfo = input.get(tableAlias, ((ExprNodeConstantDesc) nodeOutputs[1]).getValue()
+          .toString());
+
+      if (colInfo == null) {
+        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
+        return null;
+      }
+      return new ExprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+          colInfo.getTabAlias(), colInfo.getIsVirtualCol());
+    }
+
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
@@ -1004,7 +1028,11 @@ public final class TypeCheckProcFactory 
        * The difference is that there is translation for Window related tokens, so we just
        * return null;
        */
-      if ( windowingTokens.contains(expr.getType())) {
+      if (windowingTokens.contains(expr.getType())) {
+        if (!ctx.getallowWindowing())
+          throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
+              ErrorMsg.INVALID_FUNCTION.getMsg("Windowing is not supported in the context")));
+
         return null;
       }
 
@@ -1013,6 +1041,11 @@ public final class TypeCheckProcFactory 
       }
 
       if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
+        if (!ctx.getallowAllColRef())
+          throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
+              ErrorMsg.INVALID_COLUMN
+                  .getMsg("All column reference is not supported in the context")));
+
         RowResolver input = ctx.getInputRR();
         ExprNodeColumnListDesc columnList = new ExprNodeColumnListDesc();
         assert expr.getChildCount() <= 1;
@@ -1050,22 +1083,7 @@ public final class TypeCheckProcFactory 
       if (expr.getType() == HiveParser.DOT
           && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
           && nodeOutputs[0] == null) {
-
-        RowResolver input = ctx.getInputRR();
-        String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr
-            .getChild(0).getChild(0).getText());
-        // NOTE: tableAlias must be a valid non-ambiguous table alias,
-        // because we've checked that in TOK_TABLE_OR_COL's process method.
-        ColumnInfo colInfo = input.get(tableAlias,
-            ((ExprNodeConstantDesc) nodeOutputs[1]).getValue().toString());
-
-        if (colInfo == null) {
-          ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
-          return null;
-        }
-        return new ExprNodeColumnDesc(colInfo.getType(), colInfo
-            .getInternalName(), colInfo.getTabAlias(), colInfo
-            .getIsVirtualCol());
+        return processQualifiedColRef(ctx, expr, nodeOutputs);
       }
 
       // Return nulls for conversion operators
@@ -1080,7 +1098,7 @@ public final class TypeCheckProcFactory 
           expr.getType() == HiveParser.TOK_FUNCTIONSTAR ||
           expr.getType() == HiveParser.TOK_FUNCTIONDI);
 
-      if (!ctx.isAllowDistinctFunctions() && expr.getType() == HiveParser.TOK_FUNCTIONDI) {
+      if (!ctx.getAllowDistinctFunctions() && expr.getType() == HiveParser.TOK_FUNCTIONDI) {
         throw new SemanticException(
             SemanticAnalyzer.generateErrorMessage(expr, ErrorMsg.DISTINCT_NOT_SUPPORTED.getMsg()));
       }
@@ -1099,6 +1117,11 @@ public final class TypeCheckProcFactory 
       }
 
       if (expr.getType() == HiveParser.TOK_FUNCTIONSTAR) {
+        if (!ctx.getallowFunctionStar())
+        throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
+            ErrorMsg.INVALID_COLUMN
+                .getMsg(".* reference is not supported in the context")));
+
         RowResolver input = ctx.getInputRR();
         for (ColumnInfo colInfo : input.getColumnInfos()) {
           if (!colInfo.getIsVirtualCol()) {
@@ -1111,8 +1134,7 @@ public final class TypeCheckProcFactory 
       // If any of the children contains null, then return a null
       // this is a hack for now to handle the group by case
       if (children.contains(null)) {
-        RowResolver input = ctx.getInputRR();
-        List<String> possibleColumnNames = input.getReferenceableColumnAliases(null, -1);
+        List<String> possibleColumnNames = getReferenceableColumnAliases(ctx);
         String reason = String.format("(possible column names are: %s)",
             StringUtils.join(possibleColumnNames, ", "));
         ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(0), reason),
@@ -1135,6 +1157,9 @@ public final class TypeCheckProcFactory 
       }
     }
 
+    protected List<String> getReferenceableColumnAliases(TypeCheckCtx ctx) {
+      return ctx.getInputRR().getReferenceableColumnAliases(null, -1);
+    }
   }
 
   /**
@@ -1142,7 +1167,7 @@ public final class TypeCheckProcFactory 
    *
    * @return DefaultExprProcessor.
    */
-  public static DefaultExprProcessor getDefaultExprProcessor() {
+  public DefaultExprProcessor getDefaultExprProcessor() {
     return new DefaultExprProcessor();
   }
 
@@ -1160,13 +1185,18 @@ public final class TypeCheckProcFactory 
         return null;
       }
 
+      ASTNode expr = (ASTNode) nd;
+      ASTNode sqNode = (ASTNode) expr.getParent().getChild(1);
+
+      if (!ctx.getallowSubQueryExpr())
+        throw new SemanticException(SemanticAnalyzer.generateErrorMessage(sqNode,
+            ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg()));
+
       ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
       if (desc != null) {
         return desc;
       }
 
-      ASTNode expr = (ASTNode) nd;
-      ASTNode sqNode = (ASTNode) expr.getParent().getChild(1);
       /*
        * Restriction.1.h :: SubQueries only supported in the SQL Where Clause.
        */
@@ -1182,7 +1212,7 @@ public final class TypeCheckProcFactory 
    *
    * @return DateExprProcessor.
    */
-  public static SubQueryExprProcessor getSubQueryExprProcessor() {
+  public SubQueryExprProcessor getSubQueryExprProcessor() {
     return new SubQueryExprProcessor();
   }
 }
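
With TypeCheckProcFactory no longer final, its constructor protected and the get*ExprProcessor() factories turned into instance methods, expression type-checking can now be specialised by subclassing. The sketch below is hypothetical (the class name and the empty-alias behaviour are invented; the overridden factory method and the DefaultExprProcessor.getReferenceableColumnAliases hook come from this patch):

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;

public class CustomTypeCheckProcFactory extends TypeCheckProcFactory {

  @Override
  public DefaultExprProcessor getDefaultExprProcessor() {
    // Example customisation: suppress the "possible column names are: ..." hint.
    return new DefaultExprProcessor() {
      @Override
      protected List<String> getReferenceableColumnAliases(TypeCheckCtx ctx) {
        return Collections.emptyList();
      }
    };
  }
}

An instance of such a factory would then be handed to the new protected genExprNode(ASTNode, TypeCheckCtx, TypeCheckProcFactory) overload added above, which wires the overridden processors into the rule dispatcher.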

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Tue Sep 23 20:00:10 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -93,7 +94,7 @@ public class ExprNodeConstantDesc extend
       return "null";
     }
 
-    if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
+    if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME) || typeInfo instanceof BaseCharTypeInfo) {
       return "'" + value.toString() + "'";
     } else if (typeInfo.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) {
       byte[] bytes = (byte[]) value;
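
A small, hypothetical illustration of the change above (the class below is made up, and the quoting is assumed to apply to the descriptor's expression-string rendering): a char/varchar constant should now print quoted, the same way a plain string constant does.

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharConstantExprStringExample {
  public static void main(String[] args) {
    // A varchar(10) constant wrapping the value "abc".
    ExprNodeConstantDesc c = new ExprNodeConstantDesc(
        TypeInfoFactory.getVarcharTypeInfo(10), new HiveVarchar("abc", 10));
    // Expected with this patch: 'abc' (quoted) rather than abc
    System.out.println(c.getExprString());
  }
}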

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java Tue Sep 23 20:00:10 2014
@@ -372,5 +372,42 @@ public class ExprNodeDescUtils {
     } catch (Exception e) {
       return null;
     }
-  }
+	}
+
+	public static void getExprNodeColumnDesc(List<ExprNodeDesc> exprDescList,
+			Map<Integer, ExprNodeDesc> hashCodeTocolumnDescMap) {
+		for (ExprNodeDesc exprNodeDesc : exprDescList) {
+			getExprNodeColumnDesc(exprNodeDesc, hashCodeTocolumnDescMap);
+		}
+	}
+
+	/**
+	 * Get Map of ExprNodeColumnDesc HashCode to ExprNodeColumnDesc.
+	 * 
+	 * @param exprDesc
+	 * @param hashCodeTocolumnDescMap
+	 *            Assumption: If two ExprNodeColumnDesc have same hash code then
+	 *            they are logically referring to same projection
+	 */
+	public static void getExprNodeColumnDesc(ExprNodeDesc exprDesc,
+			Map<Integer, ExprNodeDesc> hashCodeTocolumnDescMap) {
+		if (exprDesc instanceof ExprNodeColumnDesc) {
+			hashCodeTocolumnDescMap.put(
+					((ExprNodeColumnDesc) exprDesc).hashCode(),
+					((ExprNodeColumnDesc) exprDesc));
+		} else if (exprDesc instanceof ExprNodeColumnListDesc) {
+			for (ExprNodeDesc child : ((ExprNodeColumnListDesc) exprDesc)
+					.getChildren()) {
+				getExprNodeColumnDesc(child, hashCodeTocolumnDescMap);
+			}
+		} else if (exprDesc instanceof ExprNodeGenericFuncDesc) {
+			for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) exprDesc)
+					.getChildren()) {
+				getExprNodeColumnDesc(child, hashCodeTocolumnDescMap);
+			}
+		} else if (exprDesc instanceof ExprNodeFieldDesc) {
+			getExprNodeColumnDesc(((ExprNodeFieldDesc) exprDesc).getDesc(),
+					hashCodeTocolumnDescMap);
+		}
+	}
 }
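
A minimal usage sketch for the new helper (the column names, table alias and types below are illustrative; the ExprNodeDescUtils.getExprNodeColumnDesc signatures come from this patch): it walks an expression list and collects every ExprNodeColumnDesc keyed by its hash code.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ColumnDescCollectionExample {
  public static void main(String[] args) {
    ExprNodeDesc keyCol = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", "t", false);
    ExprNodeDesc valCol = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "value", "t", false);

    Map<Integer, ExprNodeDesc> byHash = new HashMap<Integer, ExprNodeDesc>();
    ExprNodeDescUtils.getExprNodeColumnDesc(Arrays.asList(keyCol, valCol), byHash);

    // Per the javadoc above, equal hash codes are assumed to denote the same projection.
    System.out.println(byHash.size()); // 2, assuming the two columns hash differently
  }
}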

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java Tue Sep 23 20:00:10 2014
@@ -122,6 +122,7 @@ public class MapJoinDesc extends JoinDes
     }
   }
 
+  @Explain(displayName = "input vertices")
   public Map<Integer, String> getParentToInput() {
     return parentToInput;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Tue Sep 23 20:00:10 2014
@@ -93,6 +93,7 @@ public class StatsUtils {
 
   private static final Log LOG = LogFactory.getLog(StatsUtils.class.getName());
 
+
   /**
    * Collect table, partition and column level statistics
    * @param conf
@@ -109,15 +110,30 @@ public class StatsUtils {
   public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList partList,
       Table table, TableScanOperator tableScanOperator) throws HiveException {
 
-    Statistics stats = new Statistics();
-
     // column level statistics are required only for the columns that are needed
     List<ColumnInfo> schema = tableScanOperator.getSchema().getSignature();
     List<String> neededColumns = tableScanOperator.getNeededColumns();
+
+    return collectStatistics(conf, partList, table, schema, neededColumns);
+  }
+
+  private static Statistics collectStatistics(HiveConf conf, PrunedPartitionList partList,
+      Table table, List<ColumnInfo> schema, List<String> neededColumns) throws HiveException {
+
     boolean fetchColStats =
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_FETCH_COLUMN_STATS);
     boolean fetchPartStats =
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_FETCH_PARTITION_STATS);
+
+    return collectStatistics(conf, partList, table, schema, neededColumns, fetchColStats, fetchPartStats);
+  }
+
+  public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList partList,
+      Table table, List<ColumnInfo> schema, List<String> neededColumns,
+      boolean fetchColStats, boolean fetchPartStats) throws HiveException {
+
+    Statistics stats = new Statistics();
+
     float deserFactor =
         HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVE_STATS_DESERIALIZATION_FACTOR);
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java Tue Sep 23 20:00:10 2014
@@ -22,6 +22,7 @@ import java.util.TimeZone;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -33,7 +34,9 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-
+@Description(name = "from_utc_timestamp",
+             value = "from_utc_timestamp(timestamp, string timezone) - "
+                     + "Assumes given timestamp ist UTC and converts to given timezone (as of Hive 0.8.0)")
 public class GenericUDFFromUtcTimestamp extends GenericUDF {
 
   static final Log LOG = LogFactory.getLog(GenericUDFFromUtcTimestamp.class);
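
This commit adds @Description annotations to several UDFs (from_utc_timestamp above; if, timestamp and to_utc_timestamp below); the annotation text is what DESCRIBE FUNCTION EXTENDED reports, as the create_func1.q.out changes further down illustrate. A minimal, hypothetical GenericUDF skeleton following the same annotation pattern (everything below is invented for illustration and is not part of this commit):

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

@Description(name = "identity_str",
             value = "identity_str(str) - Returns str unchanged")
public class GenericUDFIdentityStr extends GenericUDF {

  private final Text result = new Text();

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentException("identity_str expects exactly one argument");
    }
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object o = arguments[0].get();
    if (o == null) {
      return null;
    }
    result.set(o.toString());
    return result;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "identity_str(" + children[0] + ")";
  }
}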

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java Tue Sep 23 20:00:10 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -62,6 +63,11 @@ import org.apache.hadoop.hive.ql.exec.ve
  * otherwise it returns expr3. IF() returns a numeric or string value, depending
  * on the context in which it is used.
  */
+@Description(
+    name = "if",
+    value = "IF(expr1,expr2,expr3) - If expr1 is TRUE (expr1 <> 0 and expr1 <> NULL) then"
+    + " IF() returns expr2; otherwise it returns expr3. IF() returns a numeric or string value,"
+    + " depending on the context in which it is used.")
 @VectorizedExpressions({
   IfExprLongColumnLongColumn.class, IfExprDoubleColumnDoubleColumn.class,
   IfExprLongColumnLongScalar.class, IfExprDoubleColumnDoubleScalar.class,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java Tue Sep 23 20:00:10 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -39,6 +40,8 @@ import org.apache.hadoop.hive.serde2.obj
  * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
  *
  */
+@Description(name = "timestamp",
+value = "cast(date as timestamp) - Returns timestamp")
 @VectorizedExpressions({CastLongToTimestampViaLongToLong.class,
   CastDoubleToTimestampViaDoubleToLong.class, CastDecimalToTimestamp.class})
 public class GenericUDFTimestamp extends GenericUDF {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUtcTimestamp.java Tue Sep 23 20:00:10 2014
@@ -17,7 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 
+@Description(name = "to_utc_timestamp",
+             value = "to_utc_timestamp(timestamp, string timezone) - "
+                     + "Assumes given timestamp is in given timezone and converts to UTC (as of Hive 0.8.0)")
 public class GenericUDFToUtcTimestamp extends
     GenericUDFFromUtcTimestamp {
 

Modified: hive/trunk/ql/src/test/queries/clientpositive/create_func1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/create_func1.q?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/create_func1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/create_func1.q Tue Sep 23 20:00:10 2014
@@ -2,11 +2,16 @@
 -- qtest_get_java_boolean should already be created during test initialization
 select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src limit 1;
 
+describe function extended qtest_get_java_boolean;
+
 create database mydb;
 create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
 
 show functions mydb.func1;
 
+describe function extended mydb.func1;
+
+
 select mydb.func1('abc') from src limit 1;
 
 drop function mydb.func1;

Modified: hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out Tue Sep 23 20:00:10 2014
@@ -9,6 +9,12 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 true	false
+PREHOOK: query: describe function extended qtest_get_java_boolean
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: describe function extended qtest_get_java_boolean
+POSTHOOK: type: DESCFUNCTION
+qtest_get_java_boolean(str) - GenericUDF to return native Java's boolean type
+Synonyms: default.qtest_get_java_boolean
 PREHOOK: query: create database mydb
 PREHOOK: type: CREATEDATABASE
 PREHOOK: Output: database:mydb
@@ -28,6 +34,15 @@ PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: show functions mydb.func1
 POSTHOOK: type: SHOWFUNCTIONS
 mydb.func1
+PREHOOK: query: describe function extended mydb.func1
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: describe function extended mydb.func1
+POSTHOOK: type: DESCFUNCTION
+mydb.func1(str) - Returns str with all characters changed to uppercase
+Synonyms: upper, ucase
+Example:
+  > SELECT mydb.func1('Facebook') FROM src LIMIT 1;
+  'FACEBOOK'
 PREHOOK: query: select mydb.func1('abc') from src limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/auto_join0.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/auto_join0.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/auto_join0.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/auto_join0.q.out Tue Sep 23 20:00:10 2014
@@ -56,6 +56,8 @@ STAGE PLANS:
                           0 
                           1 
                         outputColumnNames: _col0, _col1, _col2, _col3
+                        input vertices:
+                          1 Map 4
                         Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/auto_join1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/auto_join1.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/auto_join1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/auto_join1.q.out Tue Sep 23 20:00:10 2014
@@ -59,6 +59,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col6
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: UDFToInteger(_col0) (type: int), _col6 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez1.q.out Tue Sep 23 20:00:10 2014
@@ -145,6 +145,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col7
+                      input vertices:
+                        0 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col7 (type: string)
@@ -222,6 +224,8 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col3
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 133 Data size: 1411 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col1 (type: int), _col0 (type: double), _col3 (type: string)
@@ -337,6 +341,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col6, _col7
+                      input vertices:
+                        1 Map 4
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col6 (type: int), _col7 (type: string)
@@ -390,6 +396,8 @@ STAGE PLANS:
                       0 _col1 (type: int)
                       1 key (type: int)
                     outputColumnNames: _col0, _col1, _col3
+                    input vertices:
+                      1 Map 1
                     Statistics: Num rows: 150 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: int), _col0 (type: double), _col3 (type: string)
@@ -450,6 +458,8 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col3
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col1 (type: int), _col0 (type: double), _col3 (type: string)
@@ -480,6 +490,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        1 Map 4
                       Statistics: Num rows: 133 Data size: 1411 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string)
@@ -604,6 +616,9 @@ STAGE PLANS:
                         1 key (type: int)
                         2 key (type: int)
                       outputColumnNames: _col0, _col1, _col7
+                      input vertices:
+                        1 Map 1
+                        2 Map 2
                       Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col7 (type: string)
@@ -674,6 +689,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string)
@@ -689,6 +706,8 @@ STAGE PLANS:
                             0 _col0 (type: int)
                             1 key (type: int)
                           outputColumnNames: _col0, _col1, _col3
+                          input vertices:
+                            1 Map 1
                           Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
                             expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string)
@@ -763,6 +782,8 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col3
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: double), _col3 (type: string)
@@ -864,6 +885,8 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col3
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: double), _col3 (type: string)
@@ -958,6 +981,8 @@ STAGE PLANS:
                         0 value (type: string)
                         1 value (type: string)
                       outputColumnNames: _col0, _col1, _col7
+                      input vertices:
+                        0 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col7 (type: string)
@@ -1050,6 +1075,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col6
+                      input vertices:
+                        0 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
@@ -1130,6 +1157,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        0 Map 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
@@ -1141,6 +1170,8 @@ STAGE PLANS:
                           0 _col1 (type: string)
                           1 value (type: string)
                         outputColumnNames: _col0, _col12
+                        input vertices:
+                          1 Map 1
                         Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: int), _col12 (type: int)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/bucket_map_join_tez2.q.out Tue Sep 23 20:00:10 2014
@@ -155,6 +155,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        0 Map 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
@@ -166,6 +168,8 @@ STAGE PLANS:
                           0 _col1 (type: string)
                           1 value (type: string)
                         outputColumnNames: _col0, _col12
+                        input vertices:
+                          1 Map 1
                         Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: int), _col12 (type: int)
@@ -258,6 +262,8 @@ STAGE PLANS:
                         0 UDFToDouble(key) (type: double)
                         1 UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col1, _col6
+                      input vertices:
+                        0 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col6 (type: string)
@@ -330,6 +336,8 @@ STAGE PLANS:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
                         outputColumnNames: _col0, _col1
+                        input vertices:
+                          1 Map 2
                         Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: int), _col1 (type: int)
@@ -405,6 +413,8 @@ STAGE PLANS:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
                         outputColumnNames: _col0, _col1
+                        input vertices:
+                          1 Map 2
                         Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: int), _col1 (type: int)
@@ -497,6 +507,8 @@ STAGE PLANS:
                           0 _col0 (type: int)
                           1 _col0 (type: int)
                         outputColumnNames: _col0, _col1
+                        input vertices:
+                          0 Map 1
                         Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: int), _col1 (type: int)
@@ -550,6 +562,8 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 133 Data size: 1411 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: int)
@@ -641,6 +655,8 @@ STAGE PLANS:
                         0 UDFToDouble(_col0) (type: double)
                         1 UDFToDouble(key) (type: double)
                       outputColumnNames: _col0, _col2
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 133 Data size: 1411 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col2 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/correlationoptimizer1.q.out Tue Sep 23 20:00:10 2014
@@ -352,6 +352,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out Tue Sep 23 20:00:10 2014
@@ -64,6 +64,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0, _col1, _col5, _col6
+                    input vertices:
+                      1 Map 1
                     Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
@@ -118,6 +120,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col5, _col6
+                      input vertices:
+                        1 Map 2
                       Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
@@ -152,6 +156,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
+                    input vertices:
+                      0 Map 1
                     Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string)
@@ -213,6 +219,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string)
@@ -256,6 +264,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0, _col1, _col5
+                    input vertices:
+                      1 Reducer 2
                     Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string)
@@ -324,6 +334,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0
+                    input vertices:
+                      1 Map 3
                     Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string)
@@ -362,6 +374,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0, _col1, _col5
+                    input vertices:
+                      1 Reducer 2
                     Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string)
@@ -436,6 +450,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string)
@@ -519,6 +535,8 @@ STAGE PLANS:
                       0 
                       1 
                     outputColumnNames: _col0, _col1
+                    input vertices:
+                      1 Reducer 2
                     Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out Tue Sep 23 20:00:10 2014
@@ -3521,6 +3521,8 @@ STAGE PLANS:
                     keys:
                       0 ds (type: string)
                       1 ds (type: string)
+                    input vertices:
+                      1 Map 3
                     Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
@@ -3654,6 +3656,8 @@ STAGE PLANS:
                       0 ds (type: string)
                       1 ds (type: string)
                     outputColumnNames: _col3
+                    input vertices:
+                      1 Map 4
                     Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
@@ -3664,6 +3668,8 @@ STAGE PLANS:
                       keys:
                         0 _col3 (type: string)
                         1 hr (type: string)
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 2420 Data size: 25709 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 2420 Data size: 25709 Basic stats: COMPLETE Column stats: NONE
@@ -3825,6 +3831,8 @@ STAGE PLANS:
                     keys:
                       0 ds (type: string), hr (type: string)
                       1 ds (type: string), hr (type: string)
+                    input vertices:
+                      1 Map 3
                     Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
@@ -3969,6 +3977,8 @@ STAGE PLANS:
                     keys:
                       0 ds (type: string)
                       1 ds (type: string)
+                    input vertices:
+                      1 Map 3
                     Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
@@ -4077,6 +4087,8 @@ STAGE PLANS:
                       keys:
                         0 UDFToDouble(hr) (type: double)
                         1 UDFToDouble(UDFToInteger((hr / 2))) (type: double)
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
@@ -4196,6 +4208,8 @@ STAGE PLANS:
                       keys:
                         0 (hr * 2) (type: double)
                         1 hr (type: double)
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
@@ -4378,6 +4392,8 @@ STAGE PLANS:
                     keys:
                       0 ds (type: string)
                       1 _col0 (type: string)
+                    input vertices:
+                      0 Map 1
                     Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
                     Select Operator
                       Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
@@ -4478,6 +4494,8 @@ STAGE PLANS:
                       0 ds (type: string)
                       1 ds (type: string)
                     outputColumnNames: _col8
+                    input vertices:
+                      1 Map 3
                     Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: (_col8 = '2008-04-08') (type: boolean)
@@ -4573,6 +4591,8 @@ STAGE PLANS:
                       keys:
                         0 ds (type: string)
                         1 ds (type: string)
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
@@ -4734,6 +4754,8 @@ STAGE PLANS:
                       0 ds (type: string)
                       1 ds (type: string)
                     outputColumnNames: _col3
+                    input vertices:
+                      1 Map 4
                     Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
@@ -4744,6 +4766,8 @@ STAGE PLANS:
                       keys:
                         0 _col3 (type: string)
                         1 '11' (type: string)
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1210 Data size: 12854 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 1210 Data size: 12854 Basic stats: COMPLETE Column stats: NONE
@@ -4990,6 +5014,8 @@ STAGE PLANS:
                       0 ds (type: string)
                       1 _col0 (type: string)
                     outputColumnNames: _col2
+                    input vertices:
+                      1 Union 3
                     Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col2 (type: string)
@@ -5207,6 +5233,8 @@ STAGE PLANS:
                       keys:
                         0 ds (type: string), UDFToDouble(hr) (type: double)
                         1 ds (type: string), UDFToDouble(hr) (type: double)
+                      input vertices:
+                        1 Map 3
                       Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 1100 Data size: 0 Basic stats: PARTIAL Column stats: NONE
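
The dynamic_partition_pruning hunks follow the same pattern, with the added twist that the broadcast vertex also feeds partition pruning of the fact table. A rough sketch of the style of query these tests exercise; the SET keys are standard Hive settings, while the srcpart/srcpart_date layout is an assumption based on the plan excerpts above:

    -- Join a partitioned fact table to a small dimension table on the
    -- partition column. On Tez the dimension side is broadcast for the
    -- map join and, with dynamic partition pruning enabled, also prunes
    -- srcpart partitions at runtime.
    SET hive.execution.engine=tez;
    SET hive.auto.convert.join=true;
    SET hive.tez.dynamic.partition.pruning=true;

    EXPLAIN
    SELECT COUNT(*)
    FROM srcpart f JOIN srcpart_date d ON (f.ds = d.ds)
    WHERE d.`date` = '2008-04-08';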

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning_2.q.out Tue Sep 23 20:00:10 2014
@@ -178,6 +178,8 @@ STAGE PLANS:
                       0 dim_shops_id (type: int)
                       1 id (type: int)
                     outputColumnNames: _col0, _col1, _col5, _col6
+                    input vertices:
+                      1 Map 1
                     Statistics: Num rows: 0 Data size: 39 Basic stats: PARTIAL Column stats: NONE
                     Filter Operator
                       predicate: ((_col1 = _col5) and (_col6) IN ('foo', 'bar')) (type: boolean)
@@ -362,6 +364,8 @@ STAGE PLANS:
                       0 dim_shops_id (type: int)
                       1 id (type: int)
                     outputColumnNames: _col0, _col1, _col5
+                    input vertices:
+                      1 Map 1
                     Filter Operator
                       predicate: (_col1 = _col5) (type: boolean)
                       Select Operator
@@ -391,6 +395,8 @@ STAGE PLANS:
                       0 dim_shops_id (type: int)
                       1 id (type: int)
                     outputColumnNames: _col0, _col1, _col5
+                    input vertices:
+                      1 Map 2
                     Filter Operator
                       predicate: (_col1 = _col5) (type: boolean)
                       Select Operator

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out Tue Sep 23 20:00:10 2014
@@ -120,6 +120,8 @@ STAGE PLANS:
                         0 dec (type: decimal(4,2))
                         1 dec (type: decimal(4,0))
                       outputColumnNames: _col0, _col4
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 577 Data size: 64680 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: decimal(4,2)), _col4 (type: decimal(4,0))

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out Tue Sep 23 20:00:10 2014 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/mrr.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/mrr.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/mrr.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/mrr.q.out Tue Sep 23 20:00:10 2014
@@ -867,6 +867,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col5, _col6
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col5 (type: string), _col6 (type: string)
@@ -1706,6 +1708,8 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
+                      input vertices:
+                        0 Reducer 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: string), _col3 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/tez_bmj_schema_evolution.q.out Tue Sep 23 20:00:10 2014
@@ -113,6 +113,8 @@ STAGE PLANS:
                         0 key (type: int)
                         1 key (type: int)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 550 Data size: 28771 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/tez_union.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/tez_union.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/tez_union.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/tez_union.q.out Tue Sep 23 20:00:10 2014
@@ -36,6 +36,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col1
+                      input vertices:
+                        1 Map 3
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string)
                         outputColumnNames: _col0, _col1
@@ -355,6 +357,8 @@ STAGE PLANS:
                           0 key (type: string)
                           1 _col0 (type: string)
                         outputColumnNames: _col0, _col5
+                        input vertices:
+                          0 Map 1
                         Select Operator
                           expressions: _col0 (type: string), _col5 (type: string)
                           outputColumnNames: _col0, _col1
@@ -383,6 +387,8 @@ STAGE PLANS:
                           0 key (type: string)
                           1 _col0 (type: string)
                         outputColumnNames: _col0, _col5
+                        input vertices:
+                          0 Map 1
                         Select Operator
                           expressions: _col0 (type: string), _col5 (type: string)
                           outputColumnNames: _col0, _col1
@@ -535,6 +541,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 5
                       Select Operator
                         expressions: _col0 (type: string)
                         outputColumnNames: _col0
@@ -551,6 +559,9 @@ STAGE PLANS:
                             1 _col0 (type: string)
                             2 key (type: string)
                           outputColumnNames: _col0, _col5, _col6
+                          input vertices:
+                            0 Map 1
+                            2 Map 8
                           Select Operator
                             expressions: _col0 (type: string), _col5 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
@@ -600,6 +611,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 6
                       Select Operator
                         expressions: _col0 (type: string)
                         outputColumnNames: _col0
@@ -616,6 +629,9 @@ STAGE PLANS:
                             1 _col0 (type: string)
                             2 key (type: string)
                           outputColumnNames: _col0, _col5, _col6
+                          input vertices:
+                            0 Map 1
+                            2 Map 8
                           Select Operator
                             expressions: _col0 (type: string), _col5 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
@@ -662,6 +678,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 10
                       Select Operator
                         expressions: _col0 (type: string)
                         outputColumnNames: _col0
@@ -678,6 +696,9 @@ STAGE PLANS:
                             1 _col0 (type: string)
                             2 key (type: string)
                           outputColumnNames: _col0, _col5, _col6
+                          input vertices:
+                            0 Map 1
+                            2 Map 8
                           Select Operator
                             expressions: _col0 (type: string), _col5 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
@@ -883,6 +904,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 3
                       Select Operator
                         expressions: _col0 (type: string)
                         outputColumnNames: _col0
@@ -924,6 +947,8 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0
+                      input vertices:
+                        1 Map 5
                       Select Operator
                         expressions: _col0 (type: string)
                         outputColumnNames: _col0
@@ -1057,6 +1082,8 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
+                      input vertices:
+                        1 Map 3
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                         outputColumnNames: _col0, _col1, _col2, _col3
@@ -1100,6 +1127,8 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3
+                      input vertices:
+                        1 Map 3
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                         outputColumnNames: _col0, _col1, _col2, _col3
@@ -1167,6 +1196,8 @@ STAGE PLANS:
                       0 _col0 (type: string)
                       1 key (type: string)
                     outputColumnNames: _col0, _col2
+                    input vertices:
+                      0 Union 2
                     Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col2 (type: string)
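
The tez_union hunks also cover multi-way joins, where a single Map Join Operator gains more than one broadcast input (for example "0 Map 1" and "2 Map 8"). A purely illustrative sketch of such a shape:

    -- A three-way equi-join on the same key can collapse into one
    -- Map Join Operator whose plan now lists both broadcast vertices
    -- under "input vertices".
    SET hive.execution.engine=tez;
    SET hive.auto.convert.join=true;

    EXPLAIN
    SELECT a.key, b.value, c.value
    FROM src a
    JOIN src1 b ON (a.key = b.key)
    JOIN src c ON (a.key = c.key);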

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/vector_left_outer_join.q.out Tue Sep 23 20:00:10 2014
@@ -54,6 +54,8 @@ STAGE PLANS:
                       0 cint (type: int)
                       1 cint (type: int)
                     outputColumnNames: _col0
+                    input vertices:
+                      1 Map 4
                     Statistics: Num rows: 13516 Data size: 414960 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
@@ -64,6 +66,8 @@ STAGE PLANS:
                       keys:
                         0 _col0 (type: tinyint)
                         1 ctinyint (type: tinyint)
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 14867 Data size: 456456 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         Statistics: Num rows: 14867 Data size: 456456 Basic stats: COMPLETE Column stats: NONE
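
vector_left_outer_join combines the same change with vectorized execution. A hedged sketch of a comparable query, assuming the standard alltypesorc table used by the vectorization suites:

    -- With vectorization enabled, the left outer joins are still converted
    -- to Map Join Operators, and each one now reports its broadcast input
    -- under "input vertices".
    SET hive.execution.engine=tez;
    SET hive.vectorized.execution.enabled=true;
    SET hive.auto.convert.join=true;

    EXPLAIN
    SELECT COUNT(*)
    FROM alltypesorc a
    LEFT OUTER JOIN alltypesorc b ON (a.cint = b.cint)
    LEFT OUTER JOIN alltypesorc c ON (a.ctinyint = c.ctinyint);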

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out Tue Sep 23 20:00:10 2014
@@ -204,6 +204,8 @@ STAGE PLANS:
                       0 _col0 (type: int)
                       1 l_partkey (type: int)
                     outputColumnNames: _col0, _col1, _col3
+                    input vertices:
+                      1 Map 1
                     Statistics: Num rows: 831 Data size: 3326 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
@@ -215,6 +217,8 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 _col0 (type: int)
                       outputColumnNames: _col0, _col3
+                      input vertices:
+                        1 Map 4
                       Statistics: Num rows: 914 Data size: 3658 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col3 (type: int)
@@ -363,6 +367,8 @@ STAGE PLANS:
                       0 _col0 (type: int)
                       1 l_partkey (type: int)
                     outputColumnNames: _col0, _col1, _col3
+                    input vertices:
+                      1 Map 1
                     Statistics: Num rows: 831 Data size: 3326 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
@@ -374,6 +380,8 @@ STAGE PLANS:
                         0 _col1 (type: int), 1 (type: int)
                         1 _col0 (type: int), _col1 (type: int)
                       outputColumnNames: _col0, _col3
+                      input vertices:
+                        1 Map 4
                       Statistics: Num rows: 914 Data size: 3658 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col3 (type: int)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_mapjoin.q.out Tue Sep 23 20:00:10 2014
@@ -50,6 +50,8 @@ STAGE PLANS:
                         0 cint (type: int)
                         1 cint (type: int)
                       outputColumnNames: _col2, _col17
+                      input vertices:
+                        1 Map 1
                       Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col2 (type: int), _col17 (type: int)

Modified: hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_nested_mapjoin.q.out Tue Sep 23 20:00:10 2014
@@ -33,6 +33,8 @@ STAGE PLANS:
                         0 _col0 (type: smallint)
                         1 csmallint (type: smallint)
                       outputColumnNames: _col1
+                      input vertices:
+                        0 Map 4
                       Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col1 (type: double)
@@ -81,6 +83,8 @@ STAGE PLANS:
                         0 ctinyint (type: tinyint)
                         1 ctinyint (type: tinyint)
                       outputColumnNames: _col0, _col1, _col5, _col15
+                      input vertices:
+                        0 Map 3
                       Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: (_col0 = _col15) (type: boolean)

Modified: hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out?rev=1627140&r1=1627139&r2=1627140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out Tue Sep 23 20:00:10 2014
@@ -2,12 +2,12 @@ PREHOOK: query: DESCRIBE FUNCTION if
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION if
 POSTHOOK: type: DESCFUNCTION
-There is no documentation for function 'if'
+IF(expr1,expr2,expr3) - If expr1 is TRUE (expr1 <> 0 and expr1 <> NULL) then IF() returns expr2; otherwise it returns expr3. IF() returns a numeric or string value, depending on the context in which it is used.
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED if
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED if
 POSTHOOK: type: DESCFUNCTION
-There is no documentation for function 'if'
+IF(expr1,expr2,expr3) - If expr1 is TRUE (expr1 <> 0 and expr1 <> NULL) then IF() returns expr2; otherwise it returns expr3. IF() returns a numeric or string value, depending on the context in which it is used.
 PREHOOK: query: EXPLAIN
 SELECT IF(TRUE, 1, 2) AS COL1,
        IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,