Posted to commits@hive.apache.org by xu...@apache.org on 2013/11/20 06:40:48 UTC

svn commit: r1543711 [1/4] - in /hive/trunk: common/src/test/org/apache/hadoop/hive/common/type/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/j...

Author: xuefu
Date: Wed Nov 20 05:40:46 2013
New Revision: 1543711

URL: http://svn.apache.org/r1543711
Log:
HIVE-5356: Move arithmetic UDFs to generic UDF implementations (reviewed by Brock)

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPosMod.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java
Removed:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFOPDivide.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFOPMod.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFPosMod.java
Modified:
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/trunk/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_assert_true2.q.out
    hive/trunk/ql/src/test/results/clientpositive/auto_join13.q.out
    hive/trunk/ql/src/test/results/clientpositive/auto_join2.q.out
    hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/input8.q.out
    hive/trunk/ql/src/test/results/clientpositive/num_op_type_conv.q.out
    hive/trunk/ql/src/test/results/clientpositive/orc_createas1.q.out
    hive/trunk/ql/src/test/results/clientpositive/ppd_constant_expr.q.out
    hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
    hive/trunk/ql/src/test/results/clientpositive/rcfile_createas1.q.out
    hive/trunk/ql/src/test/results/clientpositive/rcfile_merge1.q.out
    hive/trunk/ql/src/test/results/clientpositive/rcfile_merge2.q.out
    hive/trunk/ql/src/test/results/clientpositive/skewjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_case.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_when.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
    hive/trunk/ql/src/test/results/clientpositive/windowing_expressions.q.out
    hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantByteObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantIntObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantLongObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantShortObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Wed Nov 20 05:40:46 2013
@@ -102,6 +102,14 @@ public class TestHiveDecimal {
   }
 
   @Test
+  public void testPosMod() {
+    HiveDecimal hd1 = HiveDecimal.create("-100.91");
+    HiveDecimal hd2 = HiveDecimal.create("9.8");
+    HiveDecimal dec = hd1.remainder(hd2).add(hd2).remainder(hd2);
+    Assert.assertEquals("6.89", dec.toString());
+  }
+
+  @Test
   public void testException() {
     HiveDecimal dec = HiveDecimal.create("3.1415.926");
     Assert.assertNull(dec);
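
The test above pins down the positive-modulo identity that pmod relies on: pmod(a, b) = ((a % b) + b) % b, which maps a negative remainder into the range [0, b). A standalone sketch of the same computation, using only the HiveDecimal calls exercised above (illustration only; not part of this commit):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class PosModSketch {
      // pmod(a, b) = ((a % b) + b) % b turns a negative remainder into
      // a value in [0, b).
      static HiveDecimal pmod(HiveDecimal a, HiveDecimal b) {
        return a.remainder(b).add(b).remainder(b);
      }

      public static void main(String[] args) {
        // -100.91 % 9.8 = -2.91; -2.91 + 9.8 = 6.89; 6.89 % 9.8 = 6.89
        System.out.println(pmod(HiveDecimal.create("-100.91"),
            HiveDecimal.create("9.8"))); // prints 6.89
      }
    }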

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Wed Nov 20 05:40:46 2013
@@ -85,17 +85,11 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFOPBitNot;
 import org.apache.hadoop.hive.ql.udf.UDFOPBitOr;
 import org.apache.hadoop.hive.ql.udf.UDFOPBitXor;
-import org.apache.hadoop.hive.ql.udf.UDFOPDivide;
 import org.apache.hadoop.hive.ql.udf.UDFOPLongDivide;
-import org.apache.hadoop.hive.ql.udf.UDFOPMinus;
-import org.apache.hadoop.hive.ql.udf.UDFOPMod;
-import org.apache.hadoop.hive.ql.udf.UDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
 import org.apache.hadoop.hive.ql.udf.UDFPI;
 import org.apache.hadoop.hive.ql.udf.UDFParseUrl;
-import org.apache.hadoop.hive.ql.udf.UDFPosMod;
 import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFRTrim;
 import org.apache.hadoop.hive.ql.udf.UDFRadians;
@@ -209,7 +203,7 @@ public final class FunctionRegistry {
     registerUDF("ceiling", UDFCeil.class, false);
     registerUDF("rand", UDFRand.class, false);
     registerGenericUDF("abs", GenericUDFAbs.class);
-    registerUDF("pmod", UDFPosMod.class, false);
+    registerGenericUDF("pmod", GenericUDFPosMod.class);
 
     registerUDF("ln", UDFLn.class, false);
     registerUDF("log2", UDFLog2.class, false);
@@ -293,11 +287,11 @@ public final class FunctionRegistry {
     registerUDF("xpath_short", UDFXPathShort.class, false);
     registerGenericUDF("xpath", GenericUDFXPath.class);
 
-    registerUDF("+", UDFOPPlus.class, true);
-    registerUDF("-", UDFOPMinus.class, true);
-    registerUDF("*", UDFOPMultiply.class, true);
-    registerUDF("/", UDFOPDivide.class, true);
-    registerUDF("%", UDFOPMod.class, true);
+    registerGenericUDF("+", GenericUDFOPPlus.class);
+    registerGenericUDF("-", GenericUDFOPMinus.class);
+    registerGenericUDF("*", GenericUDFOPMultiply.class);
+    registerGenericUDF("/", GenericUDFOPDivide.class);
+    registerGenericUDF("%", GenericUDFOPMod.class);
     registerUDF("div", UDFOPLongDivide.class, true);
 
     registerUDF("&", UDFOPBitAnd.class, true);
@@ -631,6 +625,52 @@ public final class FunctionRegistry {
     registerNumericType(PrimitiveCategory.STRING, 8);
   }
 
+  /**
+   * Check if the given type is numeric. String is considered numeric when used in
+   * numeric operators.
+   *
+   * @param typeInfo the type to check
+   * @return true if the type is considered numeric by the arithmetic operators
+   */
+  public static boolean isNumericType(PrimitiveTypeInfo typeInfo) {
+    switch (typeInfo.getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case DECIMAL:
+    case FLOAT:
+    case DOUBLE:
+    case STRING: // String or string equivalent is considered numeric when used in arithmetic operators.
+    case VARCHAR:
+    case CHAR:
+    case VOID: // NULL is considered a numeric type for arithmetic operators.
+      return true;
+    default:
+      return false;
+    }
+  }
+
+  /**
+   * Check if a type is an exact numeric type (as opposed to approximate types such as
+   * float and double). String is treated as double, and thus is not exact.
+   *
+   * @param typeInfo the type to check
+   * @return true if the type is exact
+   */
+  public static boolean isExactNumericType(PrimitiveTypeInfo typeInfo) {
+    switch (typeInfo.getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case DECIMAL:
+      return true;
+    default:
+      return false;
+    }
+  }
+
   static int getCommonLength(int aLen, int bLen) {
     int maxLength;
     if (aLen < 0 || bLen < 0) {
@@ -774,18 +814,7 @@ public final class FunctionRegistry {
     return null;
   }
 
-  /**
-   * Find a common class that objects of both TypeInfo a and TypeInfo b can
-   * convert to. This is used for places other than comparison.
-   *
-   * The common class of string and double is string.
-   *
-   * @return null if no common class could be found.
-   */
-  public static TypeInfo getCommonClass(TypeInfo a, TypeInfo b) {
-    if (a.equals(b)) {
-      return a;
-    }
+  public static PrimitiveCategory getCommonCategory(TypeInfo a, TypeInfo b) {
     if (a.getCategory() != Category.PRIMITIVE || b.getCategory() != Category.PRIMITIVE) {
       return null;
     }
@@ -796,8 +825,7 @@ public final class FunctionRegistry {
     PrimitiveGrouping pgB = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(pcB);
     // handle string types properly
     if (pgA == PrimitiveGrouping.STRING_GROUP && pgB == PrimitiveGrouping.STRING_GROUP) {
-      return getTypeInfoForPrimitiveCategory(
-          (PrimitiveTypeInfo)a, (PrimitiveTypeInfo)b,PrimitiveCategory.STRING);
+      return PrimitiveCategory.STRING;
     }
 
     Integer ai = numericTypes.get(pcA);
@@ -806,8 +834,27 @@ public final class FunctionRegistry {
       // If either is not a numeric type, return null.
       return null;
     }
-    PrimitiveCategory pcCommon = (ai > bi) ? pcA : pcB;
-    return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo)a, (PrimitiveTypeInfo)b, pcCommon);
+    
+    return (ai > bi) ? pcA : pcB;
+  }
+
+  /**
+   * Find a common class that objects of both TypeInfo a and TypeInfo b can
+   * convert to. This is used for places other than comparison.
+   *
+   * The common class of string and double is string.
+   *
+   * @return null if no common class could be found.
+   */
+  public static TypeInfo getCommonClass(TypeInfo a, TypeInfo b) {
+    if (a.equals(b)) {
+      return a;
+    }
+
+    PrimitiveCategory commonCat = getCommonCategory(a, b);
+    if (commonCat == null)
+      return null;
+    return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo)a, (PrimitiveTypeInfo)b, commonCat);
   }
 
   public static boolean implicitConvertable(PrimitiveCategory from, PrimitiveCategory to) {
@@ -830,6 +877,7 @@ public final class FunctionRegistry {
     if (from == PrimitiveCategory.VOID) {
       return true;
     }
+
     // Allow implicit String to Date conversion
     if (fromPg == PrimitiveGrouping.DATE_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
       return true;
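
The refactoring above splits the old getCommonClass() into getCommonCategory(), which picks the winning PrimitiveCategory, and getTypeInfoForPrimitiveCategory(), which materializes a concrete TypeInfo (this matters for parameterized types such as decimal, char and varchar). A usage sketch; the int/long and string/double outcomes follow from the numeric promotion ranks and the getCommonClass javadoc above:

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class CommonClassSketch {
      public static void main(String[] args) {
        // The higher-ranked numeric type wins: int + long -> bigint.
        TypeInfo t1 = FunctionRegistry.getCommonClass(
            TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo);
        System.out.println(t1); // bigint

        // Per the javadoc: the common class of string and double is string.
        TypeInfo t2 = FunctionRegistry.getCommonClass(
            TypeInfoFactory.stringTypeInfo, TypeInfoFactory.doubleTypeInfo);
        System.out.println(t2); // string
      }
    }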

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Wed Nov 20 05:40:46 2013
@@ -33,7 +33,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.exec.*;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
@@ -53,9 +64,62 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.MapWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
-import org.apache.hadoop.hive.ql.udf.*;
+import org.apache.hadoop.hive.ql.udf.UDFAcos;
+import org.apache.hadoop.hive.ql.udf.UDFAsin;
+import org.apache.hadoop.hive.ql.udf.UDFAtan;
+import org.apache.hadoop.hive.ql.udf.UDFBin;
+import org.apache.hadoop.hive.ql.udf.UDFCeil;
+import org.apache.hadoop.hive.ql.udf.UDFConv;
+import org.apache.hadoop.hive.ql.udf.UDFCos;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFDegrees;
+import org.apache.hadoop.hive.ql.udf.UDFExp;
+import org.apache.hadoop.hive.ql.udf.UDFFloor;
+import org.apache.hadoop.hive.ql.udf.UDFHex;
+import org.apache.hadoop.hive.ql.udf.UDFHour;
+import org.apache.hadoop.hive.ql.udf.UDFLTrim;
+import org.apache.hadoop.hive.ql.udf.UDFLength;
+import org.apache.hadoop.hive.ql.udf.UDFLike;
+import org.apache.hadoop.hive.ql.udf.UDFLn;
+import org.apache.hadoop.hive.ql.udf.UDFLog;
+import org.apache.hadoop.hive.ql.udf.UDFLog10;
+import org.apache.hadoop.hive.ql.udf.UDFLog2;
+import org.apache.hadoop.hive.ql.udf.UDFMinute;
+import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
+import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
+import org.apache.hadoop.hive.ql.udf.UDFPower;
+import org.apache.hadoop.hive.ql.udf.UDFRTrim;
+import org.apache.hadoop.hive.ql.udf.UDFRadians;
+import org.apache.hadoop.hive.ql.udf.UDFRand;
+import org.apache.hadoop.hive.ql.udf.UDFRegExp;
+import org.apache.hadoop.hive.ql.udf.UDFSecond;
+import org.apache.hadoop.hive.ql.udf.UDFSign;
+import org.apache.hadoop.hive.ql.udf.UDFSin;
+import org.apache.hadoop.hive.ql.udf.UDFSqrt;
+import org.apache.hadoop.hive.ql.udf.UDFSubstr;
+import org.apache.hadoop.hive.ql.udf.UDFTan;
+import org.apache.hadoop.hive.ql.udf.UDFToBoolean;
+import org.apache.hadoop.hive.ql.udf.UDFToByte;
+import org.apache.hadoop.hive.ql.udf.UDFToDouble;
+import org.apache.hadoop.hive.ql.udf.UDFToFloat;
+import org.apache.hadoop.hive.ql.udf.UDFToInteger;
+import org.apache.hadoop.hive.ql.udf.UDFToLong;
+import org.apache.hadoop.hive.ql.udf.UDFToShort;
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.udf.UDFTrim;
+import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
@@ -64,17 +128,23 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDivide;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPosMod;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
@@ -109,11 +179,11 @@ public class Vectorizer implements Physi
 
     supportedGenericUDFs.add(UDFOPNegative.class);
     supportedGenericUDFs.add(UDFOPPositive.class);
-    supportedGenericUDFs.add(UDFOPPlus.class);
-    supportedGenericUDFs.add(UDFOPMinus.class);
-    supportedGenericUDFs.add(UDFOPMultiply.class);
-    supportedGenericUDFs.add(UDFOPDivide.class);
-    supportedGenericUDFs.add(UDFOPMod.class);
+    supportedGenericUDFs.add(GenericUDFOPPlus.class);
+    supportedGenericUDFs.add(GenericUDFOPMinus.class);
+    supportedGenericUDFs.add(GenericUDFOPMultiply.class);
+    supportedGenericUDFs.add(GenericUDFOPDivide.class);
+    supportedGenericUDFs.add(GenericUDFOPMod.class);
 
     supportedGenericUDFs.add(GenericUDFOPEqualOrLessThan.class);
     supportedGenericUDFs.add(GenericUDFOPEqualOrGreaterThan.class);
@@ -159,8 +229,8 @@ public class Vectorizer implements Physi
     supportedGenericUDFs.add(UDFLog10.class);
     supportedGenericUDFs.add(UDFLog.class);
     supportedGenericUDFs.add(UDFPower.class);
-    supportedGenericUDFs.add(UDFPosMod.class);
     supportedGenericUDFs.add(GenericUDFRound.class);
+    supportedGenericUDFs.add(GenericUDFPosMod.class);
     supportedGenericUDFs.add(UDFSqrt.class);
     supportedGenericUDFs.add(UDFSign.class);
     supportedGenericUDFs.add(UDFRand.class);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Wed Nov 20 05:40:46 2013
@@ -63,7 +63,6 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,240 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * GenericUDF Base Class for operations.
+ */
+@Description(name = "op", value = "a op b - Returns the result of operation")
+public abstract class GenericUDFBaseNumeric extends GenericUDF {
+  protected String opName;
+  protected String opDisplayName;
+
+  protected transient PrimitiveObjectInspector leftOI;
+  protected transient PrimitiveObjectInspector rightOI;
+  protected transient PrimitiveObjectInspector resultOI;
+
+  protected transient Converter converterLeft;
+  protected transient Converter converterRight;
+
+  protected ByteWritable byteWritable = new ByteWritable();
+  protected ShortWritable shortWritable = new ShortWritable();
+  protected IntWritable intWritable = new IntWritable();
+  protected LongWritable longWritable = new LongWritable();
+  protected FloatWritable floatWritable = new FloatWritable();
+  protected DoubleWritable doubleWritable = new DoubleWritable();
+  protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+
+  public GenericUDFBaseNumeric() {
+    opName = getClass().getSimpleName();
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 2) {
+      throw new UDFArgumentException(opName + " requires two arguments.");
+    }
+
+    for (int i = 0; i < 2; i++) {
+      Category category = arguments[i].getCategory();
+      if (category != Category.PRIMITIVE) {
+        throw new UDFArgumentTypeException(i, "The "
+            + GenericUDFUtils.getOrdinal(i + 1)
+            + " argument of " + opName + " is expected to be a "
+            + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+            + category.toString().toLowerCase() + " was found");
+      }
+    }
+
+    leftOI = (PrimitiveObjectInspector) arguments[0];
+    rightOI = (PrimitiveObjectInspector) arguments[1];
+    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+        deriveResultTypeInfo());
+    converterLeft = ObjectInspectorConverters.getConverter(leftOI, resultOI);
+    converterRight = ObjectInspectorConverters.getConverter(rightOI, resultOI);
+
+    return resultOI;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (arguments[0] == null || arguments[1] == null) {
+      return null;
+    }
+
+    Object left = arguments[0].get();
+    Object right = arguments[1].get();
+    if (left == null && right == null) {
+      return null;
+    }
+
+    left = converterLeft.convert(left);
+    if (left == null) {
+      return null;
+    }
+    right = converterRight.convert(right);
+    if (right == null) {
+      return null;
+    }
+
+    switch (resultOI.getPrimitiveCategory()) {
+    case BYTE:
+      return evaluate((ByteWritable) left, (ByteWritable) right);
+    case SHORT:
+      return evaluate((ShortWritable) left, (ShortWritable) right);
+    case INT:
+      return evaluate((IntWritable) left, (IntWritable) right);
+    case LONG:
+      return evaluate((LongWritable) left, (LongWritable) right);
+    case FLOAT:
+      return evaluate((FloatWritable) left, (FloatWritable) right);
+    case DOUBLE:
+      return evaluate((DoubleWritable) left, (DoubleWritable) right);
+    case DECIMAL:
+      return resultOI.getPrimitiveWritableObject(
+          evaluate((HiveDecimalWritable) left, (HiveDecimalWritable) right));
+    default:
+      // Should never happen.
+      throw new RuntimeException("Unexpected type in evaluating " + opName + ": " +
+        resultOI.getPrimitiveCategory());
+    }
+  }
+
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    return null;
+  }
+
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    return null;
+  }
+
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    return null;
+  }
+
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    return null;
+  }
+
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    return null;
+  }
+
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    return null;
+  }
+
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    return null;
+  }
+
+  /**
+   * Derive the type info of the operator result, based on the object inspectors
+   * of the two operands.
+   *
+   * @return the type info of the result
+   * @throws UDFArgumentException if either operand is not of a numeric type
+   */
+  private PrimitiveTypeInfo deriveResultTypeInfo() throws UDFArgumentException {
+    PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
+    PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
+    if (!FunctionRegistry.isNumericType(left) || !FunctionRegistry.isNumericType(right)) {
+      List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
+      argTypeInfos.add(left);
+      argTypeInfos.add(right);
+      throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
+    }
+
+    // If either type is not exact, double is chosen.
+    if (!FunctionRegistry.isExactNumericType(left) || !FunctionRegistry.isExactNumericType(right)) {
+      return TypeInfoFactory.doubleTypeInfo;
+    }
+
+    return deriveResultExactTypeInfo();
+  }
+
+  /**
+   * Default implementation for deriving the exact type info of the operator result.
+   * It works for all operators except divide, which overrides it.
+   *
+   * @return the exact type info of the result
+   */
+  protected PrimitiveTypeInfo deriveResultExactTypeInfo() {
+    PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
+    PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
+
+    // Now we are handling exact types. Base implementation handles type promotion.
+    PrimitiveCategory commonCat = FunctionRegistry.getCommonCategory(left, right);
+    if (commonCat == PrimitiveCategory.DECIMAL) {
+      return deriveResultDecimalTypeInfo();
+    } else {
+      return left.getPrimitiveCategory() == commonCat ? left : right;
+    }
+  }
+
+  /**
+   * Derive the type info for the decimal result of the operator.
+   */
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo() {
+    int prec1 = leftOI.precision();
+    int prec2 = rightOI.precision();
+    int scale1 = leftOI.scale();
+    int scale2 = rightOI.scale();
+    return deriveResultDecimalTypeInfo(prec1, scale1, prec2, scale2);
+  }
+
+  protected abstract DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2);
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 2);
+    return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
+  }
+
+}
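
In the base class above, initialize() derives a single result type, both operands are converted to that type, and evaluate() dispatches to the matching type-specific overload; overloads a subclass does not implement return null. A subclass therefore only supplies the overloads it supports plus its decimal precision/scale rule. A hypothetical sketch of the pattern (GenericUDFOPMax2 and its type rule are invented for illustration; the real subclasses added by this commit follow below):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;

    // Hypothetical "greater of a and b" operator, illustration only.
    public class GenericUDFOPMax2 extends GenericUDFBaseNumeric {

      public GenericUDFOPMax2() {
        super();
        this.opDisplayName = "max2";
      }

      @Override
      protected IntWritable evaluate(IntWritable left, IntWritable right) {
        intWritable.set(Math.max(left.get(), right.get()));
        return intWritable; // writables are reused across rows, as in the base class
      }

      @Override
      protected LongWritable evaluate(LongWritable left, LongWritable right) {
        longWritable.set(Math.max(left.get(), right.get()));
        return longWritable;
      }

      // Remaining overloads omitted for brevity.

      @Override
      protected DecimalTypeInfo deriveResultDecimalTypeInfo(
          int prec1, int scale1, int prec2, int scale2) {
        // The larger of two decimals never needs more digits than the wider operand.
        int scale = Math.max(scale1, scale2);
        int prec = Math.min(HiveDecimal.MAX_PRECISION,
            Math.max(prec1 - scale1, prec2 - scale2) + scale);
        return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
      }
    }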

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColDivideLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarDivideLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColDivideDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarDivideDoubleColumn;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+/**
+ * Note that in SQL, the return type of divide is not necessarily the same as
+ * the types of the parameters. For example, 3 / 2 = 1.5, not 1. To follow SQL,
+ * divide always returns a decimal for exact operands (and a double otherwise).
+ */
+@Description(name = "/", value = "a _FUNC_ b - Divide a by b", extended = "Example:\n"
+    + "  > SELECT 3 _FUNC_ 2 FROM src LIMIT 1;\n" + "  1.5")
+@VectorizedExpressions({LongColDivideLongColumn.class, LongColDivideDoubleColumn.class,
+  DoubleColDivideLongColumn.class, DoubleColDivideDoubleColumn.class,
+  LongColDivideLongScalar.class, LongColDivideDoubleScalar.class,
+  DoubleColDivideLongScalar.class, DoubleColDivideDoubleScalar.class,
+  LongScalarDivideLongColumn.class, LongScalarDivideDoubleColumn.class,
+  DoubleScalarDivideLongColumn.class, DoubleScalarDivideDoubleColumn.class})
+public class GenericUDFOPDivide extends GenericUDFBaseNumeric {
+
+  public GenericUDFOPDivide() {
+    super();
+    this.opDisplayName = "/";
+  }
+
+  @Override
+  protected PrimitiveTypeInfo deriveResultExactTypeInfo() {
+    // No type promotion. Everything goes to decimal.
+    return deriveResultDecimalTypeInfo();
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    if (right.get() == 0.0) {
+      return null;
+    }
+    doubleWritable.set(left.get() / right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal hd1 = left.getHiveDecimal();
+    HiveDecimal hd2 = right.getHiveDecimal();
+    if (hd2.compareTo(HiveDecimal.ZERO) == 0) {
+      return null;
+    }
+    HiveDecimal dec = hd1.divide(hd2);
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int scale = Math.min(HiveDecimal.MAX_SCALE, Math.max(6, scale1 + prec2 + 1));
+    int prec = Math.min(HiveDecimal.MAX_PRECISION, prec1 - scale1 + scale2 + scale);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
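
The divide rule above guarantees at least six fractional digits and enough integer digits for the largest representable quotient. A worked sketch with hypothetical operand types decimal(10,2) / decimal(5,3), assuming HiveDecimal.MAX_PRECISION and MAX_SCALE are both 38:

    public class DivideTypeRuleSketch {
      public static void main(String[] args) {
        int prec1 = 10, scale1 = 2; // decimal(10,2) dividend
        int prec2 = 5, scale2 = 3;  // decimal(5,3) divisor
        int maxPrec = 38, maxScale = 38; // assumed HiveDecimal limits
        int scale = Math.min(maxScale, Math.max(6, scale1 + prec2 + 1)); // 8
        int prec = Math.min(maxPrec, prec1 - scale1 + scale2 + scale);   // 19
        System.out.println("decimal(" + prec + "," + scale + ")"); // decimal(19,8)
      }
    }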

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColSubtractLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarSubtractLongColumn;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "-", value = "a _FUNC_ b - Returns the difference a-b")
+@VectorizedExpressions({LongColSubtractLongColumn.class, LongColSubtractDoubleColumn.class,
+  DoubleColSubtractLongColumn.class, DoubleColSubtractDoubleColumn.class,
+  LongColSubtractLongScalar.class, LongColSubtractDoubleScalar.class,
+  DoubleColSubtractLongScalar.class, DoubleColSubtractDoubleScalar.class,
+  LongScalarSubtractLongColumn.class, LongScalarSubtractDoubleColumn.class,
+  DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class})
+public class GenericUDFOPMinus extends GenericUDFBaseNumeric {
+
+  public GenericUDFOPMinus() {
+    super();
+    this.opDisplayName = "-";
+  }
+
+  @Override
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    byteWritable.set((byte)(left.get() - right.get()));
+    return byteWritable;
+  }
+
+  @Override
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    shortWritable.set((short)(left.get() - right.get()));
+    return shortWritable;
+  }
+
+  @Override
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    intWritable.set(left.get() - right.get());
+    return intWritable;
+  }
+
+  @Override
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    longWritable.set(left.get() - right.get());
+    return longWritable;
+  }
+
+  @Override
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    floatWritable.set(left.get() - right.get());
+    return floatWritable;
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    doubleWritable.set(left.get() - right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal dec = left.getHiveDecimal().subtract(right.getHiveDecimal());
+    if (dec == null) {
+      return null;
+    }
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int intPart = Math.max(prec1 - scale1, prec2 - scale2);
+    int scale = Math.max(scale1, scale2);
+    int prec =  Math.min(intPart + scale + 1, HiveDecimal.MAX_PRECISION);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
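
Subtraction keeps the larger scale and the larger integer part, plus one extra digit for a possible borrow or carry. A worked sketch with hypothetical operand types decimal(10,2) - decimal(7,4), again assuming a precision cap of 38:

    public class MinusTypeRuleSketch {
      public static void main(String[] args) {
        int prec1 = 10, scale1 = 2; // decimal(10,2)
        int prec2 = 7, scale2 = 4;  // decimal(7,4)
        int intPart = Math.max(prec1 - scale1, prec2 - scale2); // max(8, 3) = 8
        int scale = Math.max(scale1, scale2);                   // 4
        int prec = Math.min(intPart + scale + 1, 38);           // 13
        System.out.println("decimal(" + prec + "," + scale + ")"); // decimal(13,4)
      }
    }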

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMod.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColModuloLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarModuloLongColumn;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "%", value = "a _FUNC_ b - Returns the remainder when dividing a by b")
+@VectorizedExpressions({LongColModuloLongColumn.class, LongColModuloDoubleColumn.class,
+  DoubleColModuloLongColumn.class, DoubleColModuloDoubleColumn.class,
+  LongColModuloLongScalar.class, LongColModuloDoubleScalar.class,
+  DoubleColModuloLongScalar.class, DoubleColModuloDoubleScalar.class,
+  LongScalarModuloLongColumn.class, LongScalarModuloDoubleColumn.class,
+  DoubleScalarModuloLongColumn.class, DoubleScalarModuloDoubleColumn.class})
+public class GenericUDFOPMod extends GenericUDFBaseNumeric {
+
+  public GenericUDFOPMod() {
+    super();
+    this.opDisplayName = "%";
+  }
+
+  @Override
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    byteWritable.set((byte)(left.get() % right.get()));
+    return byteWritable;
+  }
+
+  @Override
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    shortWritable.set((short)(left.get() % right.get()));
+    return shortWritable;
+  }
+
+  @Override
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    intWritable.set(left.get() % right.get());
+    return intWritable;
+  }
+
+  @Override
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    longWritable.set(left.get() % right.get());
+    return longWritable;
+  }
+
+  @Override
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    if (right.get() == 0.0f) {
+      return null;
+    }
+    floatWritable.set(left.get() % right.get());
+    return floatWritable;
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    if (right.get() == 0.0) {
+      return null;
+    }
+    doubleWritable.set(left.get() % right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal hd1 = left.getHiveDecimal();
+    HiveDecimal hd2 = right.getHiveDecimal();
+    if (hd2.compareTo(HiveDecimal.ZERO) == 0) {
+      return null;
+    }
+
+    HiveDecimal dec = hd1.remainder(hd2);
+    if (dec == null) {
+      return null;
+    }
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int scale = Math.max(scale1, scale2);
+    int prec = Math.min(HiveDecimal.MAX_PRECISION, Math.min(prec1 - scale1, prec2 - scale2) + scale);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
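
A remainder can be no wider than the narrower operand, so the rule above takes the minimum of the two integer parts. A worked sketch with hypothetical operand types decimal(10,2) % decimal(7,4), assuming a precision cap of 38:

    public class ModTypeRuleSketch {
      public static void main(String[] args) {
        int prec1 = 10, scale1 = 2; // decimal(10,2)
        int prec2 = 7, scale2 = 4;  // decimal(7,4)
        int scale = Math.max(scale1, scale2); // 4
        int prec = Math.min(38,
            Math.min(prec1 - scale1, prec2 - scale2) + scale); // min(8, 3) + 4 = 7
        System.out.println("decimal(" + prec + "," + scale + ")"); // decimal(7,4)
      }
    }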

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMultiply.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColMultiplyLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarMultiplyLongColumn;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "*", value = "a _FUNC_ b - Multiplies a by b")
+@VectorizedExpressions({LongColMultiplyLongColumn.class, LongColMultiplyDoubleColumn.class,
+  DoubleColMultiplyLongColumn.class, DoubleColMultiplyDoubleColumn.class,
+  LongColMultiplyLongScalar.class, LongColMultiplyDoubleScalar.class,
+  DoubleColMultiplyLongScalar.class, DoubleColMultiplyDoubleScalar.class,
+  LongScalarMultiplyLongColumn.class, LongScalarMultiplyDoubleColumn.class,
+  DoubleScalarMultiplyLongColumn.class, DoubleScalarMultiplyDoubleColumn.class})
+public class GenericUDFOPMultiply extends GenericUDFBaseNumeric {
+
+  public GenericUDFOPMultiply() {
+    super();
+    this.opDisplayName = "*";
+  }
+
+  @Override
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    byteWritable.set((byte)(left.get() * right.get()));
+    return byteWritable;
+  }
+
+  @Override
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    shortWritable.set((short)(left.get() * right.get()));
+    return shortWritable;
+  }
+
+  @Override
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    intWritable.set(left.get() * right.get());
+    return intWritable;
+  }
+
+  @Override
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    longWritable.set(left.get() * right.get());
+    return longWritable;
+  }
+
+  @Override
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    floatWritable.set(left.get() * right.get());
+    return floatWritable;
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    doubleWritable.set(left.get() * right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal dec = left.getHiveDecimal().multiply(right.getHiveDecimal());
+    if (dec == null) {
+      return null;
+    }
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int scale = Math.min(HiveDecimal.MAX_SCALE, scale1 + scale2);
+    int prec = Math.min(HiveDecimal.MAX_PRECISION, prec1 + prec2 + 1);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
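
Multiplication adds the scales, and adds the precisions plus one carry digit, each capped at the assumed HiveDecimal limit of 38. A worked sketch with hypothetical operand types decimal(10,2) * decimal(7,4):

    public class MultiplyTypeRuleSketch {
      public static void main(String[] args) {
        int prec1 = 10, scale1 = 2; // decimal(10,2)
        int prec2 = 7, scale2 = 4;  // decimal(7,4)
        int scale = Math.min(38, scale1 + scale2);  // 6
        int prec = Math.min(38, prec1 + prec2 + 1); // 18
        System.out.println("decimal(" + prec + "," + scale + ")"); // decimal(18,6)
      }
    }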

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColAddLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleScalarAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongScalarAddLongColumn;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * The reason we list evaluate methods for all numeric types is both better
+ * performance and type checking (so we know int + int is still an int
+ * instead of a double); otherwise a single method that takes (Number a,
+ * Number b) and uses a.doubleValue() + b.doubleValue() would be enough.
+ *
+ * The case of int + double is handled by implicit type casting via the
+ * FunctionRegistry.implicitConvertable method.
+ */
+@Description(name = "+", value = "a _FUNC_ b - Returns a+b")
+@VectorizedExpressions({LongColAddLongColumn.class, LongColAddDoubleColumn.class,
+  DoubleColAddLongColumn.class, DoubleColAddDoubleColumn.class, LongColAddLongScalar.class,
+  LongColAddDoubleScalar.class, DoubleColAddLongScalar.class, DoubleColAddDoubleScalar.class,
+  LongScalarAddLongColumn.class, LongScalarAddDoubleColumn.class, DoubleScalarAddLongColumn.class,
+  DoubleScalarAddDoubleColumn.class})
+public class GenericUDFOPPlus extends GenericUDFBaseNumeric {
+
+  public GenericUDFOPPlus() {
+    super();
+    this.opDisplayName = "+";
+  }
+
+  @Override
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    byteWritable.set((byte)(left.get() + right.get()));
+    return byteWritable;
+  }
+
+  @Override
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    shortWritable.set((short)(left.get() + right.get()));
+    return shortWritable;
+  }
+
+  @Override
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    intWritable.set(left.get() + right.get());
+    return intWritable;
+  }
+
+  @Override
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    longWritable.set(left.get() + right.get());
+    return longWritable;
+  }
+
+  @Override
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    floatWritable.set(left.get() + right.get());
+    return floatWritable;
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    doubleWritable.set(left.get() + right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal dec = left.getHiveDecimal().add(right.getHiveDecimal());
+    if (dec == null) {
+      return null;
+    }
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int intPart = Math.max(prec1 - scale1, prec2 - scale2);
+    int scale = Math.max(scale1, scale2);
+    int prec = Math.min(intPart + scale + 1, HiveDecimal.MAX_PRECISION);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
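
The addition rule above keeps the larger scale and adds one integer digit to
absorb a carry. A hand-check (illustrative only; the operand types and class
name are hypothetical):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class PlusTypeCheck {
      public static void main(String[] args) {
        // decimal(5,2) + decimal(7,3), per deriveResultDecimalTypeInfo above.
        int prec1 = 5, scale1 = 2, prec2 = 7, scale2 = 3;
        int intPart = Math.max(prec1 - scale1, prec2 - scale2);              // 4
        int scale = Math.max(scale1, scale2);                                // 3
        int prec = Math.min(intPart + scale + 1, HiveDecimal.MAX_PRECISION); // 8
        System.out.println("decimal(" + prec + "," + scale + ")");           // decimal(8,3)
      }
    }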

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPosMod.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPosMod.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPosMod.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPosMod.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.PosModDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.PosModLongToLong;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * Class for computing the positive modulo. Used for the pmod function in the
+ * CLI. See {@link org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod} and
+ * {@link org.apache.hadoop.hive.ql.exec.FunctionRegistry}.
+ */
+@Description(name = "pmod", value = "a _FUNC_ b - Compute the positive modulo")
+@VectorizedExpressions({PosModLongToLong.class, PosModDoubleToDouble.class})
+public class GenericUDFPosMod extends GenericUDFBaseNumeric {
+
+  public GenericUDFPosMod() {
+    super();
+    this.opDisplayName = "pmod";
+  }
+
+  @Override
+  protected ByteWritable evaluate(ByteWritable left, ByteWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    byteWritable.set((byte) (((left.get() % right.get()) + right.get()) % right.get()));
+    return byteWritable;
+  }
+
+  @Override
+  protected ShortWritable evaluate(ShortWritable left, ShortWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    shortWritable.set((short) (((left.get() % right.get()) + right.get()) % right.get()));
+    return shortWritable;
+  }
+
+  @Override
+  protected IntWritable evaluate(IntWritable left, IntWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    intWritable.set((((left.get() % right.get()) + right.get()) % right.get()));
+    return intWritable;
+  }
+
+  @Override
+  protected LongWritable evaluate(LongWritable left, LongWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    longWritable.set(((left.get() % right.get()) + right.get()) % right.get());
+    return longWritable;
+  }
+
+  @Override
+  protected FloatWritable evaluate(FloatWritable left, FloatWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    floatWritable.set(((left.get() % right.get()) + right.get()) % right.get());
+    return floatWritable;
+  }
+
+  @Override
+  protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
+    if (right.get() == 0) {
+      return null;
+    }
+    doubleWritable.set(((left.get() % right.get()) + right.get()) % right.get());
+    return doubleWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable left, HiveDecimalWritable right) {
+    HiveDecimal hd1 = left.getHiveDecimal();
+    HiveDecimal hd2 = right.getHiveDecimal();
+    if (hd2.compareTo(HiveDecimal.ZERO) == 0) {
+      return null;
+    }
+    HiveDecimal dec = hd1.remainder(hd2).add(hd2).remainder(hd2);
+    if (dec == null) {
+      return null;
+    }
+    decimalWritable.set(dec);
+    return decimalWritable;
+  }
+
+  @Override
+  protected DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2) {
+    int scale = Math.max(scale1, scale2);
+    int prec = Math.min(HiveDecimal.MAX_PRECISION, Math.max(prec1 - scale1, prec2 - scale2) + scale);
+    return TypeInfoFactory.getDecimalTypeInfo(prec, scale);
+  }
+
+}
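
The double-remainder form ((a % b) + b) % b is what makes the result
non-negative for a negative dividend, unlike Java's plain % operator. A
standalone hand-check (illustrative only; the class name is hypothetical):

    public class PosModCheck {
      public static void main(String[] args) {
        long a = -7, b = 3;
        System.out.println(a % b);             // -1: plain remainder keeps the sign of a
        System.out.println(((a % b) + b) % b); //  2: pmod lands in [0, b) for b > 0
      }
    }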

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Wed Nov 20 05:40:46 2013
@@ -43,7 +43,10 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.Text;
 
@@ -140,6 +143,18 @@ public final class GenericUDFUtils {
         return false;
       }
 
+      /**
+       * TODO: Hack fix until HIVE-5848 is addressed. A non-exact type shouldn't
+       * be promoted to an exact type, as FunctionRegistry.getCommonClass()
+       * might do. This corrects that.
+       */
+      if (commonTypeInfo instanceof DecimalTypeInfo) {
+        if ((!FunctionRegistry.isExactNumericType((PrimitiveTypeInfo) oiTypeInfo)) ||
+            (!FunctionRegistry.isExactNumericType((PrimitiveTypeInfo) rTypeInfo))) {
+          commonTypeInfo = TypeInfoFactory.doubleTypeInfo;
+        }
+      }
+
       returnObjectInspector = TypeInfoUtils
           .getStandardWritableObjectInspectorFromTypeInfo(commonTypeInfo);
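
To see what this guard changes, here is a minimal sketch (illustrative only;
it assumes FunctionRegistry.getCommonClass(TypeInfo, TypeInfo) behaves as the
TODO above describes, and the class name is hypothetical):

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class CommonTypeGuardCheck {
      public static void main(String[] args) {
        // double + decimal(10,2): getCommonClass() may resolve to a decimal
        // type, promising an exactness the double operand cannot deliver.
        PrimitiveTypeInfo a = TypeInfoFactory.doubleTypeInfo;
        PrimitiveTypeInfo b = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        TypeInfo common = FunctionRegistry.getCommonClass(a, b);
        if (common instanceof DecimalTypeInfo
            && (!FunctionRegistry.isExactNumericType(a)
                || !FunctionRegistry.isExactNumericType(b))) {
          common = TypeInfoFactory.doubleTypeInfo; // fall back to the non-exact side
        }
        System.out.println(common.getTypeName()); // double
      }
    }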
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Wed Nov 20 05:40:46 2013
@@ -32,9 +32,8 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -95,7 +94,8 @@ public class TestVectorSelectOperator {
     ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Long.class, "b", "table", false);
     ExprNodeColumnDesc colDesc3 = new ExprNodeColumnDesc(Long.class, "c", "table", false);
     ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
+    GenericUDF gudf = new GenericUDFOPPlus();
+
     plusDesc.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new  ArrayList<ExprNodeDesc>();
     children.add(colDesc1);

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1543711&r1=1543710&r2=1543711&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Wed Nov 20 05:40:46 2013
@@ -85,11 +85,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.UDFLTrim;
 import org.apache.hadoop.hive.ql.udf.UDFLog;
-import org.apache.hadoop.hive.ql.udf.UDFOPMinus;
-import org.apache.hadoop.hive.ql.udf.UDFOPMod;
-import org.apache.hadoop.hive.ql.udf.UDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFSin;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
@@ -102,11 +98,15 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -159,11 +159,11 @@ public class TestVectorizationContext {
      * Create original expression tree for following
      * (plus (minus (plus col1 col2) col3) (multiply col4 (mod col5 col6)) )
      */
-    GenericUDFBridge udf1 = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
-    GenericUDFBridge udf2 = new GenericUDFBridge("-", true, UDFOPMinus.class.getCanonicalName());
-    GenericUDFBridge udf3 = new GenericUDFBridge("*", true, UDFOPMultiply.class.getCanonicalName());
-    GenericUDFBridge udf4 = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
-    GenericUDFBridge udf5 = new GenericUDFBridge("%", true, UDFOPMod.class.getCanonicalName());
+    GenericUDFOPPlus udf1 = new GenericUDFOPPlus();
+    GenericUDFOPMinus udf2 = new GenericUDFOPMinus();
+    GenericUDFOPMultiply udf3 = new GenericUDFOPMultiply();
+    GenericUDFOPPlus udf4 = new GenericUDFOPPlus();
+    GenericUDFOPMod udf5 = new GenericUDFOPMod();
 
     ExprNodeGenericFuncDesc sumExpr = new ExprNodeGenericFuncDesc();
     sumExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
@@ -224,7 +224,6 @@ public class TestVectorizationContext {
 
     //Generate vectorized expression
     VectorizationContext vc = new VectorizationContext(columnMap, 6);
-
     VectorExpression ve = vc.getVectorExpression(sumExpr, VectorExpressionDescriptor.Mode.PROJECTION);
 
     //Verify vectorized expression
@@ -232,6 +231,7 @@ public class TestVectorizationContext {
     assertEquals(2, ve.getChildExpressions().length);
     VectorExpression childExpr1 = ve.getChildExpressions()[0];
     VectorExpression childExpr2 = ve.getChildExpressions()[1];
+    System.out.println(ve.toString());
     assertEquals(6, ve.getOutputColumn());
 
     assertTrue(childExpr1 instanceof LongColSubtractLongColumn);
@@ -300,7 +300,7 @@ public class TestVectorizationContext {
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Float.class, "col1", "table", false);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
 
-    GenericUDFBridge udf = new GenericUDFBridge("+", false, UDFOPPlus.class.getCanonicalName());
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
     exprDesc.setGenericUDF(udf);
 
@@ -571,7 +571,7 @@ public class TestVectorizationContext {
   @Test
   public void testVectorizeScalarColumnExpression() throws HiveException {
     ExprNodeGenericFuncDesc scalarMinusConstant = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("-", true, UDFOPMinus.class.getCanonicalName());
+    GenericUDFOPMinus gudf = new GenericUDFOPMinus();
     scalarMinusConstant.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 20);

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java?rev=1543711&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java Wed Nov 20 05:40:46 2013
@@ -0,0 +1,203 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestGenericUDFOPDivide {
+
+  @Test
+  public void testByteDivideShort() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    ByteWritable left = new ByteWritable((byte) 4);
+    ShortWritable right = new ShortWritable((short) 6);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
+        PrimitiveObjectInspectorFactory.writableShortObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(9, 6), oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("0.666667"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testVarcharDivideInt() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    HiveVarcharWritable left = new HiveVarcharWritable();
+    left.set("123");
+    IntWritable right = new IntWritable(456);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(new Double(123.0 / 456.0), new Double(res.get()));
+  }
+
+  @Test
+  public void testDoubleDivideLong() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    DoubleWritable left = new DoubleWritable(4.5);
+    LongWritable right = new LongWritable(10);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(new Double(0.45), new Double(res.get()));
+  }
+
+  @Test
+  public void testLongDivideDecimal() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    LongWritable left = new LongWritable(104);
+    HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(9, 4))
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(33, 10), oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("0.4426096949"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testFloatDivideFloat() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    FloatWritable f1 = new FloatWritable(4.5f);
+    FloatWritable f2 = new FloatWritable(1.5f);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(f1),
+        new DeferredJavaObject(f2),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(new Double(3.0), new Double(res.get()));
+  }
+
+  @Test
+  public void testDoubleDivideDecimal() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    DoubleWritable left = new DoubleWritable(74.52);
+    HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(new Double(74.52 / 234.97), new Double(res.get()));
+  }
+
+  @Test
+  public void testDecimalDivideDecimal() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("14.5"));
+    HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1)),
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11, 7), oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("0.06171"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testDecimalDivideDecimalSameParams() throws HiveException {
+    GenericUDFOPDivide udf = new GenericUDFOPDivide();
+
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2)),
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(13, 8), oi.getTypeInfo());
+  }
+
+}
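
Taken together, the decimal assertions above are consistent with a divide rule
of roughly: integer digits = prec1 - scale1 + scale2, result scale =
max(6, scale1 + prec2 + 1), precision capped at HiveDecimal.MAX_PRECISION.
That reading is inferred from the tests, not quoted from GenericUDFOPDivide;
a hand-check under it (the class name is hypothetical):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DivideTypeCheck {
      public static void main(String[] args) {
        // decimal(3,1) / decimal(5,2), as in testDecimalDivideDecimal.
        int prec1 = 3, scale1 = 1, prec2 = 5, scale2 = 2;
        int intDig = prec1 - scale1 + scale2;                           // 4
        int scale = Math.max(6, scale1 + prec2 + 1);                    // 7
        int prec = Math.min(HiveDecimal.MAX_PRECISION, intDig + scale); // 11
        System.out.println("decimal(" + prec + "," + scale + ")");      // decimal(11,7)
      }
    }

The same arithmetic reproduces decimal(9,6) for byte/short (treated as
decimal(3,0) and decimal(5,0)) and decimal(33,10) for bigint/decimal(9,4).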