Posted to commits@hive.apache.org by br...@apache.org on 2013/11/27 19:52:41 UTC

svn commit: r1546157 [1/2] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/exec/vector/ java/org/apache/hadoop/hive/ql/optimizer/physical/ java/org/apache/hadoop/hive/ql/udf/ java/org/apache/hadoop/hive/ql/u...

Author: brock
Date: Wed Nov 27 18:52:41 2013
New Revision: 1546157

URL: http://svn.apache.org/r1546157
Log:
HIVE-5706 - Move a few numeric UDFs to generic implementations (Xuefu Zhang via Brock Noland)
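
The removed classes below are classic UDFs, whose evaluate() methods are matched by
reflection at query time through GenericUDFBridge; the added classes extend GenericUDF
directly, resolving argument types once in initialize() and reusing a single Writable
output object in evaluate(). As a minimal sketch of that contract (not part of the
commit: the class name GenericUDFExampleNegate and its double-only body are
illustrative; the real implementations follow in the diff):

package org.apache.hadoop.hive.ql.udf.generic;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class GenericUDFExampleNegate extends GenericUDF {
  private transient Converter converter;                      // bound once in initialize()
  private final DoubleWritable result = new DoubleWritable(); // reused across rows

  @Override
  public ObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    if (args.length != 1) {
      throw new UDFArgumentException("example_negate requires one argument.");
    }
    // Convert any input to double; the real implementations branch per numeric type.
    ObjectInspector doubleOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    converter = ObjectInspectorConverters.getConverter(args[0], doubleOI);
    return doubleOI;
  }

  @Override
  public Object evaluate(DeferredObject[] args) throws HiveException {
    Object input = converter.convert(args[0].get());
    if (input == null) {
      return null;
    }
    result.set(-((DoubleWritable) input).get());
    return result;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "(- " + children[0] + ")";
  }
}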

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCeil.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java
Removed:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/literal_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf4.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf7.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
    hive/trunk/ql/src/test/results/clientpositive/vectorized_math_funcs.q.out
    hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1546157&r1=1546156&r2=1546157&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Wed Nov 27 18:52:41 2013
@@ -54,7 +54,6 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFAtan;
 import org.apache.hadoop.hive.ql.udf.UDFBase64;
 import org.apache.hadoop.hive.ql.udf.UDFBin;
-import org.apache.hadoop.hive.ql.udf.UDFCeil;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
 import org.apache.hadoop.hive.ql.udf.UDFCos;
 import org.apache.hadoop.hive.ql.udf.UDFDate;
@@ -66,7 +65,6 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFE;
 import org.apache.hadoop.hive.ql.udf.UDFExp;
 import org.apache.hadoop.hive.ql.udf.UDFFindInSet;
-import org.apache.hadoop.hive.ql.udf.UDFFloor;
 import org.apache.hadoop.hive.ql.udf.UDFFromUnixTime;
 import org.apache.hadoop.hive.ql.udf.UDFHex;
 import org.apache.hadoop.hive.ql.udf.UDFHour;
@@ -86,11 +84,8 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFOPBitOr;
 import org.apache.hadoop.hive.ql.udf.UDFOPBitXor;
 import org.apache.hadoop.hive.ql.udf.UDFOPLongDivide;
-import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
 import org.apache.hadoop.hive.ql.udf.UDFPI;
 import org.apache.hadoop.hive.ql.udf.UDFParseUrl;
-import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFRTrim;
 import org.apache.hadoop.hive.ql.udf.UDFRadians;
 import org.apache.hadoop.hive.ql.udf.UDFRand;
@@ -197,10 +192,10 @@ public final class FunctionRegistry {
     registerGenericUDF("size", GenericUDFSize.class);
 
     registerGenericUDF("round", GenericUDFRound.class);
-    registerUDF("floor", UDFFloor.class, false);
+    registerGenericUDF("floor", GenericUDFFloor.class);
     registerUDF("sqrt", UDFSqrt.class, false);
-    registerUDF("ceil", UDFCeil.class, false);
-    registerUDF("ceiling", UDFCeil.class, false);
+    registerGenericUDF("ceil", GenericUDFCeil.class);
+    registerGenericUDF("ceiling", GenericUDFCeil.class);
     registerUDF("rand", UDFRand.class, false);
     registerGenericUDF("abs", GenericUDFAbs.class);
     registerGenericUDF("pmod", GenericUDFPosMod.class);
@@ -214,8 +209,8 @@ public final class FunctionRegistry {
     registerUDF("log10", UDFLog10.class, false);
     registerUDF("log", UDFLog.class, false);
     registerUDF("exp", UDFExp.class, false);
-    registerUDF("power", UDFPower.class, false);
-    registerUDF("pow", UDFPower.class, false);
+    registerGenericUDF("power", GenericUDFPower.class);
+    registerGenericUDF("pow", GenericUDFPower.class);
     registerUDF("sign", UDFSign.class, false);
     registerUDF("pi", UDFPI.class, false);
     registerUDF("degrees", UDFDegrees.class, false);
@@ -257,8 +252,8 @@ public final class FunctionRegistry {
     registerGenericUDF("str_to_map", GenericUDFStringToMap.class);
     registerGenericUDF("translate", GenericUDFTranslate.class);
 
-    registerUDF("positive", UDFOPPositive.class, true, "+");
-    registerUDF("negative", UDFOPNegative.class, true, "-");
+    registerGenericUDF("positive", GenericUDFOPPositive.class);
+    registerGenericUDF("negative", GenericUDFOPNegative.class);
 
     registerUDF("day", UDFDayOfMonth.class, false);
     registerUDF("dayofmonth", UDFDayOfMonth.class, false);
@@ -1430,17 +1425,12 @@ public final class FunctionRegistry {
    * Get the UDF class from an exprNodeDesc. Returns null if the exprNodeDesc
    * does not contain a UDF class.
    */
-  private static Class<? extends UDF> getUDFClassFromExprDesc(ExprNodeDesc desc) {
+  private static Class<? extends GenericUDF> getUDFClassFromExprDesc(ExprNodeDesc desc) {
     if (!(desc instanceof ExprNodeGenericFuncDesc)) {
       return null;
     }
     ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) desc;
-    if (!(genericFuncDesc.getGenericUDF() instanceof GenericUDFBridge)) {
-      return null;
-    }
-    GenericUDFBridge bridge = (GenericUDFBridge) (genericFuncDesc
-        .getGenericUDF());
-    return bridge.getUdfClass();
+    return genericFuncDesc.getGenericUDF().getClass();
   }
 
   /**
@@ -1533,8 +1523,8 @@ public final class FunctionRegistry {
    * Returns whether the exprNodeDesc is a node of "positive".
    */
   public static boolean isOpPositive(ExprNodeDesc desc) {
-    Class<? extends UDF> udfClass = getUDFClassFromExprDesc(desc);
-    return UDFOPPositive.class == udfClass;
+    Class<? extends GenericUDF> udfClass = getUDFClassFromExprDesc(desc);
+    return GenericUDFOPPositive.class == udfClass;
   }
 
   /**
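
A side effect of the getUDFClassFromExprDesc change above: operator checks such as
isOpPositive now compare GenericUDF classes directly instead of unwrapping a
GenericUDFBridge. A small sketch of the resulting check (the desc construction mirrors
the tests later in this commit; the wrapper class is illustrative):

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPositive;

public class IsOpPositiveSketch {
  public static void main(String[] unused) {
    // Build an expression node whose UDF is the new GenericUDFOPPositive.
    ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc();
    desc.setGenericUDF(new GenericUDFOPPositive());
    // Before this commit the check only matched a GenericUDFBridge wrapping
    // UDFOPPositive; now the GenericUDF class itself is compared.
    System.out.println(FunctionRegistry.isOpPositive(desc));  // prints true
  }
}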

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1546157&r1=1546156&r2=1546157&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Wed Nov 27 18:52:41 2013
@@ -77,11 +77,8 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
 import org.apache.hadoop.hive.ql.udf.UDFHex;
-import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
 import org.apache.hadoop.hive.ql.udf.UDFToBoolean;
 import org.apache.hadoop.hive.ql.udf.UDFToByte;
 import org.apache.hadoop.hive.ql.udf.UDFToDouble;
@@ -355,22 +352,15 @@ public class VectorizationContext {
     }
 
     GenericUDF gudf = ((ExprNodeGenericFuncDesc) exprDesc).getGenericUDF();
-    if (!(gudf instanceof GenericUDFBridge)) {
-      return exprDesc;
-    }
-
-    Class<? extends UDF> cl = ((GenericUDFBridge) gudf).getUdfClass();
-
-    if (cl.equals(UDFOPNegative.class) || cl.equals(UDFOPPositive.class)) {
+    if (gudf instanceof GenericUDFOPNegative || gudf instanceof GenericUDFOPPositive) {
       ExprNodeEvaluator<?> evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
       ObjectInspector output = evaluator.initialize(null);
-
       Object constant = evaluator.evaluate(null);
       Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
       return new ExprNodeConstantDesc(java);
-    } else {
-      return exprDesc;
     }
+
+    return exprDesc;
   }
 
   /* Fold simple unary expressions in all members of the input list and return new list
@@ -561,6 +551,8 @@ public class VectorizationContext {
       return getBetweenFilterExpression(childExpr, mode);
     } else if (udf instanceof GenericUDFIn) {
       return getInFilterExpression(childExpr);
+    } else if (udf instanceof GenericUDFOPPositive) {
+      return getIdentityExpression(childExpr);
     } else if (udf instanceof GenericUDFBridge) {
       VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr, mode);
       if (v != null) {
@@ -670,9 +662,7 @@ public class VectorizationContext {
   private VectorExpression getGenericUDFBridgeVectorExpression(GenericUDFBridge udf,
       List<ExprNodeDesc> childExpr, Mode mode) throws HiveException {
     Class<? extends UDF> cl = udf.getUdfClass();
-    if (cl.equals(UDFOPPositive.class)) {
-      return getIdentityExpression(childExpr);
-    } else if (isCastToIntFamily(cl)) {
+    if (isCastToIntFamily(cl)) {
       return getCastToLongExpression(childExpr);
     } else if (cl.equals(UDFToBoolean.class)) {
       return getCastToBoolean(childExpr);
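
The folding hunk above now evaluates constant unary plus/minus expressions at plan time
whenever the GenericUDF is GenericUDFOPNegative or GenericUDFOPPositive, and positive is
additionally handled as an identity expression during vectorization. What the fold
amounts to, sketched with the same direct-invocation pattern the new unit tests use
(the wrapper class is illustrative):

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FoldUnaryMinusSketch {
  public static void main(String[] unused) throws HiveException {
    // Run "-(1.5)" once, as foldConstantsForUnaryExpression effectively does;
    // the planner would then wrap the result in an ExprNodeConstantDesc.
    GenericUDFOPNegative udf = new GenericUDFOPNegative();
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
    };
    udf.initialize(inputOIs);
    DeferredObject[] args = { new DeferredJavaObject(new DoubleWritable(1.5)) };
    DoubleWritable folded = (DoubleWritable) udf.evaluate(args);
    System.out.println(folded);  // -1.5
  }
}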

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1546157&r1=1546156&r2=1546157&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Wed Nov 27 18:52:41 2013
@@ -82,13 +82,11 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFAsin;
 import org.apache.hadoop.hive.ql.udf.UDFAtan;
 import org.apache.hadoop.hive.ql.udf.UDFBin;
-import org.apache.hadoop.hive.ql.udf.UDFCeil;
 import org.apache.hadoop.hive.ql.udf.UDFConv;
 import org.apache.hadoop.hive.ql.udf.UDFCos;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
 import org.apache.hadoop.hive.ql.udf.UDFDegrees;
 import org.apache.hadoop.hive.ql.udf.UDFExp;
-import org.apache.hadoop.hive.ql.udf.UDFFloor;
 import org.apache.hadoop.hive.ql.udf.UDFHex;
 import org.apache.hadoop.hive.ql.udf.UDFHour;
 import org.apache.hadoop.hive.ql.udf.UDFLTrim;
@@ -99,9 +97,6 @@ import org.apache.hadoop.hive.ql.udf.UDF
 import org.apache.hadoop.hive.ql.udf.UDFLog10;
 import org.apache.hadoop.hive.ql.udf.UDFLog2;
 import org.apache.hadoop.hive.ql.udf.UDFMinute;
-import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
-import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFRTrim;
 import org.apache.hadoop.hive.ql.udf.UDFRadians;
 import org.apache.hadoop.hive.ql.udf.UDFRand;
@@ -127,7 +122,9 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCeil;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFloor;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
@@ -140,11 +137,14 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPositive;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPower;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPosMod;
@@ -180,13 +180,13 @@ public class Vectorizer implements Physi
     supportedDataTypes.add("float");
     supportedDataTypes.add("double");
 
-    supportedGenericUDFs.add(UDFOPNegative.class);
-    supportedGenericUDFs.add(UDFOPPositive.class);
     supportedGenericUDFs.add(GenericUDFOPPlus.class);
     supportedGenericUDFs.add(GenericUDFOPMinus.class);
     supportedGenericUDFs.add(GenericUDFOPMultiply.class);
     supportedGenericUDFs.add(GenericUDFOPDivide.class);
     supportedGenericUDFs.add(GenericUDFOPMod.class);
+    supportedGenericUDFs.add(GenericUDFOPNegative.class);
+    supportedGenericUDFs.add(GenericUDFOPPositive.class);
 
     supportedGenericUDFs.add(GenericUDFOPEqualOrLessThan.class);
     supportedGenericUDFs.add(GenericUDFOPEqualOrGreaterThan.class);
@@ -224,14 +224,14 @@ public class Vectorizer implements Physi
     supportedGenericUDFs.add(UDFAtan.class);
     supportedGenericUDFs.add(UDFDegrees.class);
     supportedGenericUDFs.add(UDFRadians.class);
-    supportedGenericUDFs.add(UDFFloor.class);
-    supportedGenericUDFs.add(UDFCeil.class);
+    supportedGenericUDFs.add(GenericUDFFloor.class);
+    supportedGenericUDFs.add(GenericUDFCeil.class);
     supportedGenericUDFs.add(UDFExp.class);
     supportedGenericUDFs.add(UDFLn.class);
     supportedGenericUDFs.add(UDFLog2.class);
     supportedGenericUDFs.add(UDFLog10.class);
     supportedGenericUDFs.add(UDFLog.class);
-    supportedGenericUDFs.add(UDFPower.class);
+    supportedGenericUDFs.add(GenericUDFPower.class);
     supportedGenericUDFs.add(GenericUDFRound.class);
     supportedGenericUDFs.add(GenericUDFPosMod.class);
     supportedGenericUDFs.add(UDFSqrt.class);

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+public abstract class GenericUDFBaseUnary extends GenericUDF {
+  protected String opName;
+  protected String opDisplayName;
+
+  private transient PrimitiveObjectInspector inputOI;
+  protected transient PrimitiveObjectInspector resultOI;
+
+  protected transient Converter converter;
+
+  protected ByteWritable byteWritable = new ByteWritable();
+  protected ShortWritable shortWritable = new ShortWritable();
+  protected IntWritable intWritable = new IntWritable();
+  protected LongWritable longWritable = new LongWritable();
+  protected FloatWritable floatWritable = new FloatWritable();
+  protected DoubleWritable doubleWritable = new DoubleWritable();
+  protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+
+  public GenericUDFBaseUnary() {
+    opName = getClass().getSimpleName();
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentException(opName + " requires one argument.");
+    }
+
+    Category category = arguments[0].getCategory();
+    if (category != Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0, "The "
+          + GenericUDFUtils.getOrdinal(1)
+          + " argument of " + opName + "  is expected to a "
+          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+          + category.toString().toLowerCase() + " is found");
+    }
+
+    inputOI = (PrimitiveObjectInspector) arguments[0];
+    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
+      throw new UDFArgumentTypeException(0, "The "
+          + GenericUDFUtils.getOrdinal(1)
+          + " argument of " + opName + "  is expected to a "
+          + "numeric type, but "
+          + inputOI.getTypeName() + " is found");
+    }
+
+    PrimitiveTypeInfo resultTypeInfo = deriveResultTypeInfo(inputOI.getTypeInfo());
+    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
+    converter = ObjectInspectorConverters.getConverter(inputOI, resultOI);
+    return resultOI;
+  }
+
+  private PrimitiveTypeInfo deriveResultTypeInfo(PrimitiveTypeInfo typeInfo) {
+    switch(typeInfo.getPrimitiveCategory()) {
+    case STRING:
+    case VARCHAR:
+    case CHAR:
+      return TypeInfoFactory.doubleTypeInfo;
+    default:
+      return typeInfo;
+    }
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return "(" + opDisplayName + " " + children[0] + ")";
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCeil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCeil.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCeil.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCeil.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCeilDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCeilLongToLong;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "ceil,ceiling",
+value = "_FUNC_(x) - Find the smallest integer not smaller than x",
+extended = "Example:\n"
+    + "  > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
+    + "  0\n"
+    + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
+@VectorizedExpressions({FuncCeilLongToLong.class, FuncCeilDoubleToLong.class})
+public final class GenericUDFCeil extends GenericUDFFloorCeilBase {
+
+  public GenericUDFCeil() {
+    super();
+    opDisplayName = "ceil";
+  }
+
+  @Override
+  protected LongWritable evaluate(DoubleWritable input) {
+    longWritable.set((long) Math.ceil(input.get()));
+    return longWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable input) {
+    HiveDecimal bd = input.getHiveDecimal();
+    decimalWritable.set(bd.setScale(0, HiveDecimal.ROUND_CEILING));
+    return decimalWritable;
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloor.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloor.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncFloorDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncFloorLongToLong;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "floor",
+value = "_FUNC_(x) - Find the largest integer not greater than x",
+extended = "Example:\n"
+    + "  > SELECT _FUNC_(-0.1) FROM src LIMIT 1;\n"
+    + "  -1\n"
+    + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
+@VectorizedExpressions({FuncFloorLongToLong.class, FuncFloorDoubleToLong.class})
+public final class GenericUDFFloor extends GenericUDFFloorCeilBase {
+
+  public GenericUDFFloor() {
+    super();
+    opDisplayName = "floor";
+  }
+
+  @Override
+  protected LongWritable evaluate(DoubleWritable input) {
+    longWritable.set((long) Math.floor(input.get()));
+    return longWritable;
+  }
+
+  @Override
+  protected HiveDecimalWritable evaluate(HiveDecimalWritable input) {
+    HiveDecimal bd = input.getHiveDecimal();
+    decimalWritable.set(bd.setScale(0, HiveDecimal.ROUND_FLOOR));
+    return decimalWritable;
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFloorCeilBase.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.LongWritable;
+
+public abstract class GenericUDFFloorCeilBase extends GenericUDF {
+  private final String opName;
+  protected String opDisplayName;
+
+  private transient PrimitiveObjectInspector inputOI;
+  private transient PrimitiveObjectInspector resultOI;
+
+  private transient Converter converter;
+
+  protected LongWritable longWritable = new LongWritable();
+  protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+
+  public GenericUDFFloorCeilBase() {
+    opName = getClass().getSimpleName();
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentException(opName + " requires one argument.");
+    }
+
+    Category category = arguments[0].getCategory();
+    if (category != Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0, "The "
+          + GenericUDFUtils.getOrdinal(1)
+          + " argument of " + opName + "  is expected to a "
+          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+          + category.toString().toLowerCase() + " is found");
+    }
+
+    inputOI = (PrimitiveObjectInspector) arguments[0];
+    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
+      throw new UDFArgumentTypeException(0, "The "
+          + GenericUDFUtils.getOrdinal(1)
+          + " argument of " + opName + "  is expected to a "
+          + "numeric type, but "
+          + inputOI.getTypeName() + " is found");
+    }
+
+    PrimitiveTypeInfo resultTypeInfo = null;
+    PrimitiveTypeInfo inputTypeInfo = inputOI.getTypeInfo();
+    if (inputTypeInfo instanceof DecimalTypeInfo) {
+      DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inputTypeInfo;
+      resultTypeInfo = TypeInfoFactory.getDecimalTypeInfo(
+          decTypeInfo.precision() - decTypeInfo.scale() + 1, 0);
+      ObjectInspector decimalOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
+      converter = ObjectInspectorConverters.getConverter(inputOI, decimalOI);
+    } else {
+      resultTypeInfo = TypeInfoFactory.longTypeInfo;
+      ObjectInspector doubleObjectInspector = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+      converter = ObjectInspectorConverters.getConverter(inputOI, doubleObjectInspector);
+    }
+
+    return resultOI =
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (arguments[0] == null) {
+      return null;
+    }
+
+    Object input = arguments[0].get();
+    if (input == null) {
+      return null;
+    }
+
+    input = converter.convert(input);
+    if (input == null) {
+      return null;
+    }
+
+    switch (resultOI.getPrimitiveCategory()) {
+    case LONG:
+      return evaluate((DoubleWritable)input);
+    case DECIMAL:
+      return evaluate((HiveDecimalWritable)input);
+    default:
+      // Should never happen.
+      throw new IllegalStateException("Unexpected type in evaluating " + opName + ": " +
+          resultOI.getPrimitiveCategory());
+    }
+  }
+
+  protected abstract LongWritable evaluate(DoubleWritable input);
+
+  protected abstract HiveDecimalWritable evaluate(HiveDecimalWritable input);
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return opDisplayName + "(" + children[0] + ")";
+  }
+
+}
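
A note on the decimal result type derived in initialize() above: an input of
decimal(p, s) maps to decimal(p - s + 1, 0), so decimal(11,6) becomes decimal(6,0);
the extra integer digit absorbs a carry such as ceil(99999.999999) = 100000.
TestGenericUDFCeil later in this commit exercises exactly this mapping.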

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+
+@Description(name = "-", value = "_FUNC_ a - Returns -a")
+@VectorizedExpressions({LongColUnaryMinus.class, DoubleColUnaryMinus.class})
+public class GenericUDFOPNegative extends GenericUDFBaseUnary {
+
+  public GenericUDFOPNegative() {
+    super();
+    this.opDisplayName = "-";
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (arguments[0] == null) {
+      return null;
+    }
+
+    Object input = arguments[0].get();
+    if (input == null) {
+      return null;
+    }
+
+    input = converter.convert(input);
+    if (input == null) {
+      return null;
+    }
+
+    switch (resultOI.getPrimitiveCategory()) {
+    case BYTE:
+      byteWritable.set((byte) -(((ByteWritable)input).get()));
+      return byteWritable;
+    case SHORT:
+      shortWritable.set((short) -(((ShortWritable)input).get()));
+      return shortWritable;
+    case INT:
+      intWritable.set(-(((IntWritable)input).get()));
+      return intWritable;
+    case LONG:
+      longWritable.set(-(((LongWritable)input).get()));
+      return longWritable;
+    case FLOAT:
+      floatWritable.set(-(((FloatWritable)input).get()));
+      return floatWritable;
+    case DOUBLE:
+      doubleWritable.set(-(((DoubleWritable)input).get()));
+      return doubleWritable;
+    case DECIMAL:
+      HiveDecimal dec = ((HiveDecimalWritable)input).getHiveDecimal();
+      decimalWritable.set(dec.negate());
+      return decimalWritable;
+    default:
+      // Should never happen.
+      throw new RuntimeException("Unexpected type in evaluating " + opName + ": " +
+          resultOI.getPrimitiveCategory());
+    }
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPositive.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+@Description(name = "+", value = "_FUNC_ a - Returns a")
+public class GenericUDFOPPositive extends GenericUDFBaseUnary {
+
+  public GenericUDFOPPositive() {
+    super();
+    this.opDisplayName = "+";
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (arguments[0] == null) {
+      return null;
+    }
+
+    Object input = arguments[0].get();
+    if (input == null) {
+      return null;
+    }
+
+    return converter.convert(input);
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPower.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerLongToDouble;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+@Description(name = "power,pow",
+value = "_FUNC_(x1, x2) - raise x1 to the power of x2",
+extended = "Example:\n"
+    + "  > SELECT _FUNC_(2, 3) FROM src LIMIT 1;\n" + "  8")
+@VectorizedExpressions({FuncPowerLongToDouble.class, FuncPowerDoubleToDouble.class})
+public class GenericUDFPower extends GenericUDF {
+  private final String opName;
+  private final String opDisplayName;
+
+  private transient PrimitiveObjectInspector baseOI;
+  private transient PrimitiveObjectInspector powerOI;
+  protected transient PrimitiveObjectInspector resultOI;
+
+  private transient Converter baseConverter;
+  private transient Converter powerConverter;
+
+  private final DoubleWritable doubleWritable = new DoubleWritable();
+
+  public GenericUDFPower() {
+    opName = getClass().getSimpleName();
+    opDisplayName = "power";
+    resultOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 2) {
+      throw new UDFArgumentException(opName + " requires two arguments.");
+    }
+
+    for (int i = 0; i < 2; i++) {
+      Category category = arguments[i].getCategory();
+      if (category != Category.PRIMITIVE) {
+        throw new UDFArgumentTypeException(i, "The "
+            + GenericUDFUtils.getOrdinal(i + 1)
+            + " argument of " + opName + "  is expected to a "
+            + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+            + category.toString().toLowerCase() + " is found");
+      }
+    }
+
+    baseOI = (PrimitiveObjectInspector) arguments[0];
+    if (!FunctionRegistry.isNumericType(baseOI.getTypeInfo())) {
+      throw new UDFArgumentTypeException(0, "The "
+          + GenericUDFUtils.getOrdinal(1)
+          + " argument of " + opName + "  is expected to a "
+          + "numeric type, but "
+          + baseOI.getTypeName() + " is found");
+    }
+
+    powerOI = (PrimitiveObjectInspector) arguments[1];
+    if (!FunctionRegistry.isNumericType(powerOI.getTypeInfo())) {
+      throw new UDFArgumentTypeException(1, "The "
+          + GenericUDFUtils.getOrdinal(2)
+          + " argument of " + opName + "  is expected to a "
+          + "numeric type, but "
+          + powerOI.getTypeName() + " is found");
+    }
+
+    baseConverter = ObjectInspectorConverters.getConverter(baseOI,
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+    powerConverter = ObjectInspectorConverters.getConverter(powerOI,
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+    return resultOI;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 2);
+    return opDisplayName + "(" + children[0] + ", " + children[1] + ")";
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (arguments[0] == null || arguments[1] == null) {
+      return null;
+    }
+
+    Object base = arguments[0].get();
+    Object power = arguments[1].get();
+    if (base == null || power == null) {
+      return null;
+    }
+
+    base = baseConverter.convert(base);
+    if (base == null) {
+      return null;
+    }
+    power = powerConverter.convert(power);
+    if (power == null) {
+      return null;
+    }
+
+    doubleWritable.set(Math.pow(((DoubleWritable)base).get(), ((DoubleWritable)power).get()));
+    return doubleWritable;
+  }
+
+}

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1546157&r1=1546156&r2=1546157&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Wed Nov 27 18:52:41 2013
@@ -85,8 +85,6 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.UDFLTrim;
 import org.apache.hadoop.hive.ql.udf.UDFLog;
-import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFSin;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
@@ -101,10 +99,12 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPower;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
@@ -619,7 +619,7 @@ public class TestVectorizationContext {
   public void testUnaryMinusColumnLong() throws HiveException {
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(Integer.class, "col1", "table", false);
     ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("-", true, UDFOPNegative.class.getCanonicalName());
+    GenericUDF gudf = new GenericUDFOPNegative();
     negExprDesc.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
     children.add(col1Expr);
@@ -637,7 +637,7 @@ public class TestVectorizationContext {
   public void testUnaryMinusColumnDouble() throws HiveException {
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(Float.class, "col1", "table", false);
     ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("-", true, UDFOPNegative.class.getCanonicalName());
+    GenericUDF gudf = new GenericUDFOPNegative();
     negExprDesc.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
     children.add(col1Expr);
@@ -841,11 +841,10 @@ public class TestVectorizationContext {
     Assert.assertTrue(4.5 == ((FuncLogWithBaseLongToDouble) ve).getBase());
 
     //Power with double power
-    gudfBridge = new GenericUDFBridge("power", false, UDFPower.class.getName());
     children2.clear();
     children2.add(colDesc2);
     children2.add(new ExprNodeConstantDesc(4.5));
-    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setGenericUDF(new GenericUDFPower());
     mathFuncExpr.setChildren(children2);
     ve = vc.getVectorExpression(mathFuncExpr);
     Assert.assertEquals(FuncPowerDoubleToDouble.class, ve.getClass());

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFCeil.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestGenericUDFCeil {
+
+  @Test
+  public void testByte() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    ByteWritable input = new ByteWritable((byte) 4);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(4L, res.get());
+  }
+
+  @Test
+  public void testShort() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    ShortWritable input = new ShortWritable((short) -74);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableShortObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(-74L, res.get());
+  }
+
+  @Test
+  public void testInt() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    IntWritable input = new IntWritable(747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(747L, res.get());
+  }
+
+  @Test
+  public void testLong() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    LongWritable input = new LongWritable(3234747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(3234747L, res.get());
+  }
+
+  @Test
+  public void testFloat() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    FloatWritable input = new FloatWritable(323.4747f);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(324L, res.get());
+  }
+
+  @Test
+  public void testDouble() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    DoubleWritable input = new DoubleWritable(32300.004747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32301L, res.get());
+  }
+
+  @Test
+  public void testDecimal() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
+    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("32301"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testString() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    Text input = new Text("32300.004747");
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32301L, res.get());
+  }
+
+  @Test
+  public void testVarchar() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
+    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
+    HiveVarcharWritable input = new HiveVarcharWritable(vc);
+    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32301L, res.get());
+  }
+
+  @Test
+  public void testChar() throws HiveException {
+    GenericUDFCeil udf = new GenericUDFCeil();
+
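+    // Note: "-32300.004747" is 13 characters, so HiveChar with maxLength 12
+    // silently truncates it to "-32300.00474"; ceil is -32300 either way.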
+    HiveChar vc = new HiveChar("-32300.004747", 12);
+    HiveCharWritable input = new HiveCharWritable(vc);
+    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(-32300L, res.get());
+  }
+
+}
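
Each test above repeats the same initialize/evaluate choreography. A minimal sketch of that
shared shape as a hypothetical helper (the name evaluateUnary is illustrative and not part of
this commit):

    // Hypothetical helper (not part of this commit): initialize a unary
    // GenericUDF over one ObjectInspector, then evaluate a single argument.
    private static Object evaluateUnary(GenericUDF udf, ObjectInspector argOI,
        Object argValue) throws HiveException {
      udf.initialize(new ObjectInspector[] { argOI });
      return udf.evaluate(new DeferredObject[] { new DeferredJavaObject(argValue) });
    }

With such a helper, each test would reduce to one call plus the type and value assertions.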

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFloor.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestGenericUDFFloor {
+
+  @Test
+  public void testByte() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    ByteWritable input = new ByteWritable((byte) 4);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(4L, res.get());
+  }
+
+  @Test
+  public void testShort() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    ShortWritable input = new ShortWritable((short) 74);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableShortObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(74L, res.get());
+  }
+
+  @Test
+  public void testInt() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    IntWritable input = new IntWritable(-747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(-747L, res.get());
+  }
+
+  @Test
+  public void testLong() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    LongWritable input = new LongWritable(3234747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(3234747L, res.get());
+  }
+
+  @Test
+  public void testFloat() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    FloatWritable input = new FloatWritable(-323.4747f);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(-324L, res.get());
+  }
+
+  @Test
+  public void testDouble() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    DoubleWritable input = new DoubleWritable(32300.004747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32300L, res.get());
+  }
+
+  @Test
+  public void testDecimal() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
+    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("32300"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testString() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    Text input = new Text("32300.004747");
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32300L, res.get());
+  }
+
+  @Test
+  public void testVarchar() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
+    HiveVarcharWritable input = new HiveVarcharWritable(vc);
+    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32300L, res.get());
+  }
+
+  @Test
+  public void testChar() throws HiveException {
+    GenericUDFFloor udf = new GenericUDFFloor();
+
+    HiveChar vc = new HiveChar("32300.004747", 12);
+    HiveCharWritable input = new HiveCharWritable(vc);
+    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(32300L, res.get());
+  }
+
+}
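
Both decimal tests assert that floor and ceil over decimal(11,6) report decimal(6,0): the scale
is dropped and one extra integer digit is kept for a possible carry (e.g. ceil(99.9) = 100). A
hedged sketch of that rule, consistent with the assertions above:

    // Consistent with the tests above: the result type keeps
    // (precision - scale + 1) integer digits and scale 0.
    DecimalTypeInfo in = TypeInfoFactory.getDecimalTypeInfo(11, 6);
    DecimalTypeInfo out = TypeInfoFactory.getDecimalTypeInfo(
        in.precision() - in.scale() + 1, 0);   // decimal(6, 0)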

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestGenericUDFOPNegative {
+
+  @Test
+  public void testByte() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    ByteWritable input = new ByteWritable((byte) 4);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.byteTypeInfo, oi.getTypeInfo());
+    ByteWritable res = (ByteWritable) udf.evaluate(args);
+    Assert.assertEquals((byte)-4, res.get());
+  }
+
+  @Test
+  public void testShort() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    ShortWritable input = new ShortWritable((short) 74);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableShortObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.shortTypeInfo, oi.getTypeInfo());
+    ShortWritable res = (ShortWritable) udf.evaluate(args);
+    Assert.assertEquals((short)-74, res.get());
+  }
+
+  @Test
+  public void testInt() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    IntWritable input = new IntWritable(747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intTypeInfo, oi.getTypeInfo());
+    IntWritable res = (IntWritable) udf.evaluate(args);
+    Assert.assertEquals(-747, res.get());
+  }
+
+  @Test
+  public void testLong() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    LongWritable input = new LongWritable(3234747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(-3234747L, res.get());
+  }
+
+  @Test
+  public void testFloat() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    FloatWritable input = new FloatWritable(323.4747f);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo());
+    FloatWritable res = (FloatWritable) udf.evaluate(args);
+    Assert.assertEquals(-323.4747f, res.get(), 0.0f);
+  }
+
+  @Test
+  public void testDouble() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    DoubleWritable input = new DoubleWritable(32300.004747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(-32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testDecimal() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
+    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(inputTypeInfo, oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("-32300.004747"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testString() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    Text input = new Text("32300.004747");
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(-32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testVarchar() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
+    HiveVarcharWritable input = new HiveVarcharWritable(vc);
+    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(-32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testChar() throws HiveException {
+    GenericUDFOPNegative udf = new GenericUDFOPNegative();
+
+    HiveChar vc = new HiveChar("32300.004747", 12);
+    HiveCharWritable input = new HiveCharWritable(vc);
+    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(-32300.004747, res.get(), 0.0);
+  }
+
+}
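
The string, varchar, and char cases all assert a double return type: string-family arguments
are coerced to double before negation. A minimal sketch exercising that path directly, reusing
the imports already present in this test (the literal "1.5" is illustrative):

    GenericUDFOPNegative negate = new GenericUDFOPNegative();
    PrimitiveObjectInspector outOI = (PrimitiveObjectInspector) negate.initialize(
        new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.writableStringObjectInspector });
    // outOI.getTypeInfo() is TypeInfoFactory.doubleTypeInfo
    DoubleWritable negated = (DoubleWritable) negate.evaluate(
        new DeferredObject[] { new DeferredJavaObject(new Text("1.5")) });
    // negated.get() == -1.5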

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java?rev=1546157&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java Wed Nov 27 18:52:41 2013
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestGenericUDFOPPositive {
+
+  @Test
+  public void testByte() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    ByteWritable input = new ByteWritable((byte) 4);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.byteTypeInfo, oi.getTypeInfo());
+    ByteWritable res = (ByteWritable) udf.evaluate(args);
+    Assert.assertEquals((byte)4, res.get());
+  }
+
+  @Test
+  public void testShort() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    ShortWritable input = new ShortWritable((short) 74);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableShortObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.shortTypeInfo, oi.getTypeInfo());
+    ShortWritable res = (ShortWritable) udf.evaluate(args);
+    Assert.assertEquals((short)74, res.get());
+  }
+
+  @Test
+  public void testInt() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    IntWritable input = new IntWritable(747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intTypeInfo, oi.getTypeInfo());
+    IntWritable res = (IntWritable) udf.evaluate(args);
+    Assert.assertEquals(747, res.get());
+  }
+
+  @Test
+  public void testLong() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    LongWritable input = new LongWritable(3234747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
+    LongWritable res = (LongWritable) udf.evaluate(args);
+    Assert.assertEquals(3234747L, res.get());
+  }
+
+  @Test
+  public void testFloat() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    FloatWritable input = new FloatWritable(323.4747f);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo());
+    FloatWritable res = (FloatWritable) udf.evaluate(args);
+    Assert.assertEquals(323.4747f, res.get(), 0.0f);
+  }
+
+  @Test
+  public void testDouble() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    DoubleWritable input = new DoubleWritable(32300.004747);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testDecimal() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
+    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(inputTypeInfo, oi.getTypeInfo());
+    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveDecimal.create("32300.004747"), res.getHiveDecimal());
+  }
+
+  @Test
+  public void testString() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    Text input = new Text("32300.004747");
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testVarchar() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
+    HiveVarcharWritable input = new HiveVarcharWritable(vc);
+    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(32300.004747, res.get(), 0.0);
+  }
+
+  @Test
+  public void testChar() throws HiveException {
+    GenericUDFOPPositive udf = new GenericUDFOPPositive();
+
+    HiveChar vc = new HiveChar("32300.004747", 12);
+    HiveCharWritable input = new HiveCharWritable(vc);
+    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(input)
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
+    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
+    Assert.assertEquals(32300.004747, res.get(), 0.0);
+  }
+
+}
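
Taken together, these assertions show unary plus as a typed pass-through: numeric arguments
keep their type and value, while string-family arguments are first coerced to double, mirroring
unary minus. A minimal sketch under those assumptions:

    GenericUDFOPPositive plus = new GenericUDFOPPositive();
    PrimitiveObjectInspector outOI = (PrimitiveObjectInspector) plus.initialize(
        new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.writableIntObjectInspector });
    // outOI.getTypeInfo() is TypeInfoFactory.intTypeInfo; the value is unchanged.
    IntWritable same = (IntWritable) plus.evaluate(
        new DeferredObject[] { new DeferredJavaObject(new IntWritable(747)) });
    // same.get() == 747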