Posted to commits@hive.apache.org by ji...@apache.org on 2014/03/04 20:28:06 UTC

svn commit: r1574177 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/ ql/...

Author: jitendra
Date: Tue Mar  4 19:28:06 2014
New Revision: 1574177

URL: http://svn.apache.org/r1574177
Log:
HIVE-6496: Queries fail to Vectorize (jitendra)

Added:
    hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_cast.q
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/SqlMathUtil.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/SqlMathUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/SqlMathUtil.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/SqlMathUtil.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/SqlMathUtil.java Tue Mar  4 19:28:06 2014
@@ -393,12 +393,15 @@ public final class SqlMathUtil {
     return (int) remainder;
   }
 
+  /**
+   * Returns length of the array discounting the trailing elements with zero value.
+   */
   private static int arrayValidLength(int[] array) {
     int len = array.length;
-    while (len >= 0 && array[len - 1] == 0) {
+    while (len > 0 && array[len - 1] == 0) {
       --len;
     }
-    return len < 0 ? 0 : len;
+    return len <= 0 ? 0 : len;
   }
 
   /**

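The one-character change above fixes an off-by-one: with the old "len >= 0" test, an all-zero array drove len down to 0 and the next loop check read array[-1], throwing ArrayIndexOutOfBoundsException before the final guard could ever apply. A minimal, self-contained sketch of the corrected logic (the wrapper class and main driver are illustrative, not part of the patch):

    // Illustrative driver around the patched arrayValidLength logic.
    public class ArrayValidLengthSketch {
      // Mirrors the fixed method: length of the array ignoring trailing zeros.
      static int arrayValidLength(int[] array) {
        int len = array.length;
        while (len > 0 && array[len - 1] == 0) {
          --len;
        }
        return len <= 0 ? 0 : len;
      }

      public static void main(String[] args) {
        System.out.println(arrayValidLength(new int[] { 1, 2, 0, 0 })); // 2
        System.out.println(arrayValidLength(new int[] { 0, 0, 0, 0 })); // 0
        // With the old "len >= 0" condition the all-zero input reached
        // array[-1] in the loop test and threw ArrayIndexOutOfBoundsException.
      }
    }

The new zero-dividend test case added below exercises exactly this all-zero path.
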
Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java Tue Mar  4 19:28:06 2014
@@ -43,5 +43,16 @@ public class TestSqlMathUtil {
       assertArrayEquals(new int[] { 0x10000, 0, 0, 0, 0 }, quotient);
       assertArrayEquals(new int[] { 0, 0, 0, 0, 0 }, remainder);
     }
+
+    {
+      // Zero dividend
+      int[] dividend = new int[] { 0, 0, 0, 0 };
+      int[] divisor = new int[] { 0xF700, 0, 0x3900, 0 };
+      int[] quotient = new int[5];
+      int[] remainder = SqlMathUtil.divideMultiPrecision(dividend, divisor,
+          quotient);
+      assertArrayEquals(new int[] { 0, 0, 0, 0, 0 }, quotient);
+      assertArrayEquals(new int[] { 0, 0, 0, 0, 0 }, remainder);
+    }
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java Tue Mar  4 19:28:06 2014
@@ -199,7 +199,7 @@ public class VectorExpressionDescriptor 
       b.append(mode);
       b.append(", Argument Types = {");
       for (int i = 0; i < argCount; i++) {
-        if (i == 0) {
+        if (i != 0) {
           b.append(",");
         }
         b.append(argTypes[i]);
@@ -208,7 +208,7 @@ public class VectorExpressionDescriptor 
 
       b.append(", Input Expression Types = {");
       for (int i = 0; i < argCount; i++) {
-        if (i == 0) {
+        if (i != 0) {
           b.append(",");
         }
         b.append(exprTypes[i]);

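Both hunks above flip the separator test to the standard join idiom: emit the comma before every element except the first. With the old "i == 0" condition the comma landed only in front of the first element, so the types printed run together with a stray leading comma. A self-contained sketch (the class and sample values are illustrative, not the actual enum names):

    public class SeparatorSketch {
      public static void main(String[] args) {
        String[] argTypes = { "LONG", "DECIMAL" }; // illustrative values
        StringBuilder b = new StringBuilder("{");
        for (int i = 0; i < argTypes.length; i++) {
          if (i != 0) {      // patched test; the old "i == 0" put the comma
            b.append(",");   // before the first element instead of between
          }
          b.append(argTypes[i]);
        }
        b.append("}");
        System.out.println(b); // {LONG,DECIMAL} -- old code printed {,LONGDECIMAL}
      }
    }
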
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Tue Mar  4 19:28:06 2014
@@ -344,7 +344,7 @@ public class VectorizationContext {
    * @return List of child expressions added with cast.
    */
   private List<ExprNodeDesc> getChildExpressionsWithImplicitCast(GenericUDF genericUDF,
-      List<ExprNodeDesc> children, TypeInfo returnType) {
+      List<ExprNodeDesc> children, TypeInfo returnType) throws HiveException {
     if (isExcludedFromCast(genericUDF)) {
 
       // No implicit cast needed
@@ -407,7 +407,8 @@ public class VectorizationContext {
    * The GenericUDFs might need their children output to be cast to the given castType.
    * This method returns a cast expression that would achieve the required casting.
    */
-  private ExprNodeDesc getImplicitCastExpression(GenericUDF udf, ExprNodeDesc child, TypeInfo castType) {
+  private ExprNodeDesc getImplicitCastExpression(GenericUDF udf, ExprNodeDesc child, TypeInfo castType)
+      throws HiveException {
     TypeInfo inputTypeInfo = child.getTypeInfo();
     String inputTypeString = inputTypeInfo.getTypeName();
     String castTypeString = castType.getTypeName();
@@ -457,7 +458,7 @@ public class VectorizationContext {
     return null;
   }
 
-  private GenericUDF getGenericUDFForCast(TypeInfo castType) {
+  private GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException {
     UDF udfClass = null;
     GenericUDF genericUdf = null;
     switch (((PrimitiveTypeInfo) castType).getPrimitiveCategory()) {
@@ -494,8 +495,14 @@ public class VectorizationContext {
       case BINARY:
         genericUdf = new GenericUDFToBinary();
         break;
+      case DECIMAL:
+        genericUdf = new GenericUDFToDecimal();
+        break;
     }
     if (genericUdf == null) {
+      if (udfClass == null) {
+        throw new HiveException("Could not add implicit cast for type "+castType.getTypeName());
+      }
       genericUdf = new GenericUDFBridge();
       ((GenericUDFBridge) genericUdf).setUdfClassName(udfClass.getClass().getName());
     }
@@ -713,10 +720,11 @@ public class VectorizationContext {
         throw new HiveException("Cannot handle expression type: " + child.getClass().getSimpleName());
       }
     }
-    Class<?> vclass = this.vMap.getVectorExpressionClass(udf, builder.build());
+    VectorExpressionDescriptor.Descriptor descriptor = builder.build();
+    Class<?> vclass = this.vMap.getVectorExpressionClass(udf, descriptor);
     if (vclass == null) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("No vector udf found for "+udf.getSimpleName());
+        LOG.debug("No vector udf found for "+udf.getSimpleName() + ", descriptor: "+descriptor);
       }
       return null;
     }
@@ -963,7 +971,7 @@ public class VectorizationContext {
     } else if (isCastToFloatFamily(cl)) {
       return getCastToDoubleExpression(cl, childExpr, returnType);
     } else if (cl.equals(UDFToString.class)) {
-      return getCastToString(childExpr);
+      return getCastToString(childExpr, returnType);
     }
     return null;
   }
@@ -987,6 +995,8 @@ public class VectorizationContext {
           returnType);
     } else if (isStringFamily(inputType)) {
       return createVectorExpression(CastStringToDecimal.class, childExpr, Mode.PROJECTION, returnType);
+    } else if (isDatetimeFamily(inputType)) {
+      return createVectorExpression(CastTimestampToDecimal.class, childExpr, Mode.PROJECTION, returnType);
     }
     throw new HiveException("Unhandled cast input type: " + inputType);
   }
@@ -1031,7 +1041,7 @@ public class VectorizationContext {
     return d;
   }
 
-  private VectorExpression getCastToString(List<ExprNodeDesc> childExpr)
+  private VectorExpression getCastToString(List<ExprNodeDesc> childExpr, TypeInfo returnType)
       throws HiveException {
     String inputType = childExpr.get(0).getTypeString();
     if (inputType.equals("boolean")) {
@@ -1039,6 +1049,8 @@ public class VectorizationContext {
       return createVectorExpression(CastBooleanToStringViaLongToString.class, childExpr, Mode.PROJECTION, null);
     } else if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToString.class, childExpr, Mode.PROJECTION, null);
+    } else if (isDecimalFamily(inputType)) {
+      return createVectorExpression(CastDecimalToString.class, childExpr, Mode.PROJECTION, returnType);
     }
     /* The string type is deliberately omitted -- the planner removes string to string casts.
      * Timestamp, float, and double types are handled by the legacy code path. See isLegacyPathUDF.

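The VectorizationContext changes work together: getGenericUDFForCast gains a DECIMAL arm (GenericUDFToDecimal), the cast helpers declare throws HiveException so the new failure can propagate, and the bridge fallback now fails descriptively when neither a GenericUDF nor a bridged UDF matches. Previously an unhandled cast type fell through to udfClass.getClass().getName() with udfClass still null, so the query died with a bare NullPointerException. A self-contained sketch of the patched control flow (strings stand in for the UDF classes; only the branching mirrors the patch):

    public class CastResolutionSketch {
      static String resolveCast(String castType) throws Exception {
        String genericUdf = null;
        String udfClass = null;
        switch (castType) {
          case "decimal": genericUdf = "GenericUDFToDecimal"; break; // new arm
          case "boolean": udfClass   = "UDFToBoolean";        break; // bridged
          default: break; // an unlisted type leaves both null
        }
        if (genericUdf == null) {
          if (udfClass == null) {
            // Pre-patch, execution fell through to udfClass.getClass() here,
            // so an unhandled type surfaced as a NullPointerException.
            throw new Exception("Could not add implicit cast for type " + castType);
          }
          genericUdf = "GenericUDFBridge(" + udfClass + ")";
        }
        return genericUdf;
      }

      public static void main(String[] args) {
        for (String t : new String[] { "decimal", "boolean", "interval" }) {
          try {
            System.out.println(t + " -> " + resolveCast(t));
          } catch (Exception e) {
            System.out.println(t + " -> " + e.getMessage());
          }
        }
      }
    }
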
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java Tue Mar  4 19:28:06 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v
 import java.sql.Date;
 import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
@@ -30,16 +31,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableByteObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -300,6 +292,14 @@ public class VectorUDFAdaptor extends Ve
       } else {
         lv.vector[i] = ((WritableBooleanObjectInspector) outputOI).get(value) ? 1 : 0;
       }
+    } else if (outputOI instanceof WritableHiveDecimalObjectInspector) {
+      DecimalColumnVector dcv = (DecimalColumnVector) colVec;
+      if (value instanceof HiveDecimal) {
+        dcv.vector[i].update(((HiveDecimal) value).bigDecimalValue());
+      } else {
+        HiveDecimal hd = ((WritableHiveDecimalObjectInspector) outputOI).getPrimitiveJavaObject(value);
+        dcv.vector[i].update(hd.bigDecimalValue());
+      }
     } else {
       throw new RuntimeException("Unhandled object type " + outputOI.getTypeName());
     }

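The adaptor now covers decimal outputs: when the wrapped UDF's output inspector is a WritableHiveDecimalObjectInspector, the result is written into a DecimalColumnVector, and the value may arrive either as a bare HiveDecimal or as a writable that the inspector must unwrap. A stub sketch of that two-path normalization (BigDecimal and the writable stub stand in for the Hive types):

    import java.math.BigDecimal;

    public class DecimalOutputSketch {
      static class DecimalWritableStub {              // stand-in for the writable
        final BigDecimal v;
        DecimalWritableStub(BigDecimal v) { this.v = v; }
        BigDecimal get() { return v; }
      }

      static BigDecimal[] vector = new BigDecimal[2]; // DecimalColumnVector stand-in

      static void setDecimal(int i, Object value) {
        if (value instanceof BigDecimal) {
          vector[i] = (BigDecimal) value;                  // bare value path
        } else {
          vector[i] = ((DecimalWritableStub) value).get(); // unwrap path
        }
      }

      public static void main(String[] args) {
        setDecimal(0, new BigDecimal("1.50"));
        setDecimal(1, new DecimalWritableStub(new BigDecimal("-13326")));
        System.out.println(vector[0] + " " + vector[1]); // 1.50 -13326
      }
    }
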
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Tue Mar  4 19:28:06 2014
@@ -34,19 +34,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
-import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.exec.GroupByOperator;
-import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.OperatorFactory;
-import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
-import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
@@ -65,7 +53,9 @@ import org.apache.hadoop.hive.ql.lib.Rul
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.lib.TaskGraphWalker;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc;
@@ -693,7 +683,7 @@ public class Vectorizer implements Physi
         // TODO: this cannot happen - VectorizationContext throws in such cases.
         return false;
       }
-    } catch (HiveException e) {
+    } catch (Exception e) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Failed to vectorize", e);
       }
@@ -731,19 +721,22 @@ public class Vectorizer implements Physi
 
   private VectorizationContext getVectorizationContext(Operator<? extends OperatorDesc> op,
       PhysicalContext pctx) {
-    RowResolver rr = pctx.getParseContext().getOpParseCtx().get(op).getRowResolver();
+    RowSchema rs = op.getSchema();
 
     Map<String, Integer> cmap = new HashMap<String, Integer>();
     int columnCount = 0;
-    for (ColumnInfo c : rr.getColumnInfos()) {
+    for (ColumnInfo c : rs.getSignature()) {
       if (!c.getIsVirtualCol()) {
         cmap.put(c.getInternalName(), columnCount++);
       }
     }
-    Table tab = pctx.getParseContext().getTopToTable().get(op);
-    if (tab.getPartitionKeys() != null) {
-      for (FieldSchema fs : tab.getPartitionKeys()) {
-        cmap.put(fs.getName(), columnCount++);
+    PrunedPartitionList partList = pctx.getParseContext().getOpToPartList().get(op);
+    if (partList != null) {
+      Table tab = partList.getSourceTable();
+      if (tab.getPartitionKeys() != null) {
+        for (FieldSchema fs : tab.getPartitionKeys()) {
+          cmap.put(fs.getName(), columnCount++);
+        }
       }
     }
     return new VectorizationContext(cmap, columnCount);

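This hunk is the heart of HIVE-6496: the column map is now built from the operator's own RowSchema rather than the parse-context RowResolver, and partition keys are appended only when a pruned partition list actually exists for the operator. The old code looked the operator up in getTopToTable() and called tab.getPartitionKeys() unconditionally, which presumably hit a NullPointerException whenever no table mapping existed, so vectorization failed. A self-contained sketch of the guarded construction (the list contents are illustrative stand-ins for the schema signature and partition keys):

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class ColumnMapSketch {
      public static void main(String[] args) {
        List<String> signature = Arrays.asList("cdouble", "cint");
        List<String> partitionKeys = Arrays.asList("ds"); // null when unpartitioned

        Map<String, Integer> cmap = new LinkedHashMap<>();
        int columnCount = 0;
        for (String c : signature) {
          cmap.put(c, columnCount++);
        }
        // The patch guards this step; pre-patch code dereferenced the table
        // lookup unconditionally and could throw here.
        if (partitionKeys != null) {
          for (String fs : partitionKeys) {
            cmap.put(fs, columnCount++);
          }
        }
        System.out.println(cmap); // {cdouble=0, cint=1, ds=2}
      }
    }
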
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Tue Mar  4 19:28:06 2014
@@ -22,6 +22,7 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -42,7 +43,7 @@ import org.apache.hadoop.io.Text;
  *
  */
 @VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
-  CastDoubleToBooleanViaDoubleToLong.class})
+  CastDoubleToBooleanViaDoubleToLong.class, CastDecimalToBoolean.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();
 

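This change and the two UDF edits below follow one pattern: each row-mode UDF advertises its vectorized counterparts through the @VectorizedExpressions annotation, and the vectorizer's expression map (the vMap lookup seen in VectorizationContext above) picks an implementation whose descriptor matches the argument types. Registering the decimal cast classes lets decimal inputs vectorize instead of forcing the query back to row mode. A self-contained sketch of reading such an annotation reflectively (the annotation and classes here are local stand-ins, not Hive's):

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    public class AnnotationLookupSketch {
      @Retention(RetentionPolicy.RUNTIME)
      @interface VectorizedExprs { Class<?>[] value(); }

      static class CastDecimalToBoolean {}

      @VectorizedExprs({ CastDecimalToBoolean.class })
      static class UDFToBooleanStandIn {}

      public static void main(String[] args) {
        VectorizedExprs ann =
            UDFToBooleanStandIn.class.getAnnotation(VectorizedExprs.class);
        for (Class<?> c : ann.value()) {
          System.out.println(c.getSimpleName()); // CastDecimalToBoolean
        }
      }
    }
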
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java Tue Mar  4 19:28:06 2014
@@ -21,18 +21,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloDoubleColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloDoubleColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -49,7 +38,9 @@ import org.apache.hadoop.io.LongWritable
   LongColModuloLongScalar.class, LongColModuloDoubleScalar.class,
   DoubleColModuloLongScalar.class, DoubleColModuloDoubleScalar.class,
   LongScalarModuloLongColumn.class, LongScalarModuloDoubleColumn.class,
-  DoubleScalarModuloLongColumn.class, DoubleScalarModuloDoubleColumn.class})
+  DoubleScalarModuloLongColumn.class, DoubleScalarModuloDoubleColumn.class,
+  DecimalColModuloDecimalColumn.class, DecimalColModuloDecimalScalar.class,
+  DecimalScalarModuloDecimalColumn.class})
 public class GenericUDFOPMod extends GenericUDFBaseNumeric {
 
   public GenericUDFOPMod() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java?rev=1574177&r1=1574176&r2=1574177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java Tue Mar  4 19:28:06 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToTimestampViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToTimestampViaLongToLong;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -39,7 +40,7 @@ import org.apache.hadoop.hive.serde2.obj
  *
  */
 @VectorizedExpressions({CastLongToTimestampViaLongToLong.class,
-  CastDoubleToTimestampViaDoubleToLong.class})
+  CastDoubleToTimestampViaDoubleToLong.class, CastDecimalToTimestamp.class})
 public class GenericUDFTimestamp extends GenericUDF {
 
   private transient PrimitiveObjectInspector argumentOI;

Added: hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_cast.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_cast.q?rev=1574177&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_cast.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_cast.q Tue Mar  4 19:28:06 2014
@@ -0,0 +1,5 @@
+SET hive.vectorized.execution.enabled=true;
+
+EXPLAIN SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10;
+
+SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10;

Added: hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out?rev=1574177&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out Tue Mar  4 19:28:06 2014
@@ -0,0 +1,56 @@
+PREHOOK: query: EXPLAIN SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 6736 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (((cdouble is not null and cint is not null) and cboolean1 is not null) and ctimestamp1 is not null) (type: boolean)
+              Statistics: Num rows: 421 Data size: 23577 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: cdouble (type: double), cint (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), CAST( cdouble AS decimal(20,10)) (type: decimal(20,10)), CAST( cint AS decimal(23,14)) (type: decimal(23,14)), CAST( cboolean1 AS decimal(5,2)) (type: decimal(5,2)), CAST( ctimestamp1 AS decimal(15,0)) (type: decimal(15,0))
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+                Statistics: Num rows: 421 Data size: 23577 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 560 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 560 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+PREHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS DECIMAL(20,10)), CAST(cint AS DECIMAL(23,14)), CAST(cboolean1 AS DECIMAL(5,2)), CAST(ctimestamp1 AS DECIMAL(15,0)) FROM alltypesorc WHERE cdouble IS NOT NULL AND cint IS NOT NULL AND cboolean1 IS NOT NULL AND ctimestamp1 IS NOT NULL LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-13326.0	528534767	true	1969-12-31 15:59:46.674	-13326	528534767	1	-13
+-15813.0	528534767	true	1969-12-31 15:59:55.787	-15813	528534767	1	-4
+-9566.0	528534767	true	1969-12-31 15:59:44.187	-9566	528534767	1	-16
+15007.0	528534767	true	1969-12-31 15:59:50.434	15007	528534767	1	-10
+7021.0	528534767	true	1969-12-31 16:00:15.007	7021	528534767	1	15
+4963.0	528534767	true	1969-12-31 16:00:07.021	4963	528534767	1	7
+-7824.0	528534767	true	1969-12-31 16:00:04.963	-7824	528534767	1	5
+-15431.0	528534767	true	1969-12-31 15:59:52.176	-15431	528534767	1	-8
+-15549.0	528534767	true	1969-12-31 15:59:44.569	-15549	528534767	1	-15
+5780.0	528534767	true	1969-12-31 15:59:44.451	5780	528534767	1	-16