Posted to commits@hive.apache.org by am...@apache.org on 2013/04/17 09:29:46 UTC

svn commit: r1468783 [7/16] - in /hive/branches/HIVE-4115: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/ beeline/src/test/org/apache/ beeline/src/test/org/apache/hive/ beeline/src/test/org/apache/hive/beeline/ beeline/src/test/org/...

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/ListBucketingCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/ListBucketingCtx.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/ListBucketingCtx.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/ListBucketingCtx.java Wed Apr 17 07:29:38 2013
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.SkewedValueList;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
@@ -39,7 +40,7 @@ public class ListBucketingCtx implements
   private static final long serialVersionUID = 1L;
   private List<String> skewedColNames;
   private List<List<String>> skewedColValues;
-  private Map<List<String>, String> lbLocationMap;
+  private Map<SkewedValueList, String> lbLocationMap;
   private List<SkewedColumnPositionPair> rowSkewedIndex;
   private boolean isStoredAsSubDirectories;
   private String defaultKey;
@@ -82,14 +83,14 @@ public class ListBucketingCtx implements
   /**
    * @return the lbLocationMap
    */
-  public Map<List<String>, String> getLbLocationMap() {
+  public Map<SkewedValueList, String> getLbLocationMap() {
     return lbLocationMap;
   }
 
   /**
    * @param lbLocationMap the lbLocationMap to set
    */
-  public void setLbLocationMap(Map<List<String>, String> lbLocationMap) {
+  public void setLbLocationMap(Map<SkewedValueList, String> lbLocationMap) {
     this.lbLocationMap = lbLocationMap;
   }
 

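For context, this hunk changes the list-bucketing location map's key from a raw List<String> to the Thrift-generated metastore type SkewedValueList, so each skewed-value combination travels as a single named object rather than a bare list. A minimal sketch of the wrapper-key idea follows; the class below is a hypothetical stand-in, not the actual generated metastore class:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical stand-in for the Thrift-generated SkewedValueList.
    class SkewedValueListStandIn {
      private final List<String> skewedValueList;

      SkewedValueListStandIn(List<String> values) {
        this.skewedValueList = new ArrayList<String>(values);
      }

      // Value-based equals/hashCode make the wrapper usable as a map key.
      @Override
      public boolean equals(Object o) {
        return o instanceof SkewedValueListStandIn
            && skewedValueList.equals(((SkewedValueListStandIn) o).skewedValueList);
      }

      @Override
      public int hashCode() {
        return skewedValueList.hashCode();
      }

      public static void main(String[] args) {
        Map<SkewedValueListStandIn, String> lbLocationMap =
            new HashMap<SkewedValueListStandIn, String>();
        lbLocationMap.put(new SkewedValueListStandIn(Arrays.asList("ca", "2013")),
            "/warehouse/t/skewed/ca/2013");
        // Looks up by value, exactly as the old List<String> key did.
        System.out.println(
            lbLocationMap.get(new SkewedValueListStandIn(Arrays.asList("ca", "2013"))));
      }
    }
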
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Wed Apr 17 07:29:38 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,8 +55,12 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 
+@SuppressWarnings("deprecation")
 public class PTFDeserializer {
 
   PTFDesc ptfDesc;
@@ -83,7 +88,7 @@ public class PTFDeserializer {
     while ( !ptfChain.isEmpty() ) {
       currentDef = ptfChain.pop();
       if ( currentDef instanceof PTFQueryInputDef) {
-        initialize((PTFQueryInputDef)currentDef);
+        initialize((PTFQueryInputDef)currentDef, inputOI);
       }
       else if ( currentDef instanceof WindowTableFunctionDef) {
         initializeWindowing((WindowTableFunctionDef)currentDef);
@@ -101,8 +106,6 @@ public class PTFDeserializer {
      * 1. setup resolve, make connections
      */
     TableFunctionEvaluator tEval = def.getTFunction();
-    /*WindowingTableFunctionResolver tResolver = (WindowingTableFunctionResolver)
-        FunctionRegistry.getTableFunctionResolver(def.getName());*/
     WindowingTableFunctionResolver tResolver =
         (WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
     tResolver.initialize(ptfDesc, def, tEval);
@@ -141,7 +144,7 @@ public class PTFDeserializer {
       StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
           aliases, fieldOIs);
       tResolver.setWdwProcessingOutputOI(wdwOutOI);
-      initialize(def.getOutputFromWdwFnProcessing());
+      initialize(def.getOutputFromWdwFnProcessing(), wdwOutOI);
     }
     else {
       def.setOutputFromWdwFnProcessing(inpShape);
@@ -161,8 +164,8 @@ public class PTFDeserializer {
     /*
      * 4. give Evaluator chance to setup for Output execution; setup Output shape.
      */
-    initialize(def.getOutputShape());
     tResolver.initializeOutputOI();
+    initialize(def.getOutputShape(), tEval.getOutputOI());
 
     /*
      * If we have windowExpressions then we convert to Std. Object to process;
@@ -175,9 +178,9 @@ public class PTFDeserializer {
     }
   }
 
-  protected void initialize(PTFQueryInputDef def) throws HiveException {
+  protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {
     ShapeDetails outShape = def.getOutputShape();
-    initialize(outShape);
+    initialize(outShape, OI);
   }
 
   protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
@@ -206,7 +209,7 @@ public class PTFDeserializer {
     if (tEval.isTransformsRawInput())
     {
       tResolver.initializeRawInputOI();
-      initialize(def.getRawInputShape());
+      initialize(def.getRawInputShape(), tEval.getRawInputOI());
     }
     else {
       def.setRawInputShape(inpShape);
@@ -218,7 +221,7 @@ public class PTFDeserializer {
      * 4. give Evaluator chance to setup for Output execution; setup Output shape.
      */
     tResolver.initializeOutputOI();
-    initialize(def.getOutputShape());
+    initialize(def.getOutputShape(), tEval.getOutputOI());
   }
 
   static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException
@@ -286,10 +289,11 @@ public class PTFDeserializer {
     return outOI;
   }
 
-  protected void initialize(ShapeDetails shp) throws HiveException {
+  protected void initialize(ShapeDetails shp, StructObjectInspector OI) throws HiveException {
     String serdeClassName = shp.getSerdeClassName();
     Properties serDeProps = new Properties();
-    Map<String, String> serdePropsMap = shp.getSerdeProps();
+    Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
+    addOIPropertiestoSerDePropsMap(OI, serdePropsMap);
     for (String serdeName : serdePropsMap.keySet()) {
       serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
     }
@@ -328,4 +332,43 @@ public class PTFDeserializer {
     }
   }
 
+  @SuppressWarnings({"unchecked"})
+  public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
+      Map<String,String> serdePropsMap) {
+
+    if ( serdePropsMap == null ) {
+      return;
+    }
+
+    ArrayList<? extends Object>[] tInfo = getTypeMap(OI);
+
+    ArrayList<String> columnNames = (ArrayList<String>) tInfo[0];
+    ArrayList<TypeInfo> fields = (ArrayList<TypeInfo>) tInfo[1];
+    StringBuilder cNames = new StringBuilder();
+    StringBuilder cTypes = new StringBuilder();
+
+    for (int i = 0; i < fields.size(); i++)
+    {
+      cNames.append(i > 0 ? "," : "");
+      cTypes.append(i > 0 ? "," : "");
+      cNames.append(columnNames.get(i));
+      cTypes.append(fields.get(i).getTypeName());
+    }
+
+    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
+        cNames.toString());
+    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
+        cTypes.toString());
+  }
+
+  private static ArrayList<? extends Object>[] getTypeMap(
+      StructObjectInspector oi) {
+    StructTypeInfo t = (StructTypeInfo) TypeInfoUtils
+        .getTypeInfoFromObjectInspector(oi);
+    ArrayList<String> fnames = t.getAllStructFieldNames();
+    ArrayList<TypeInfo> fields = t.getAllStructFieldTypeInfos();
+    return new ArrayList<?>[]
+    { fnames, fields };
+  }
+
 }

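The notable new piece here is addOIPropertiestoSerDePropsMap: each shape's SerDe is now re-initialized from the ObjectInspector handed down the PTF chain, with the "columns" and "columns.types" properties rebuilt from the OI rather than read out of the serialized plan. A minimal sketch of what the method produces for a two-column struct (the field names below are illustrative):

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class OIPropsDemo {
      public static void main(String[] args) {
        // Build a struct OI with fields (id int, name string).
        StructObjectInspector oi = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("id", "name"),
            Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector));
        Map<String, String> serdeProps = new LinkedHashMap<String, String>();
        PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdeProps);
        // Expected: {columns=id,name, columns.types=int,string}
        System.out.println(serdeProps);
      }
    }
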
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilePublisher.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilePublisher.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilePublisher.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilePublisher.java Wed Apr 17 07:29:38 2013
@@ -45,7 +45,7 @@ public class HiveProfilePublisher {
       }
       info.getConnection().close();
       return true;
-    } catch (SQLException e) {
+    } catch (Exception e) {
       LOG.error("Error during JDBC termination. ", e);
       return false;
     }
@@ -100,7 +100,7 @@ public class HiveProfilePublisher {
         };
         PreparedStatement insStmt = info.getInsert(stats);
         Utilities.executeWithRetry(execUpdate, insStmt, info.getWaitWindow(), info.getMaxRetries());
-      } catch (SQLException e) {
+      } catch (Exception e) {
         LOG.error("ERROR during publishing profiling data. ", e);
         return false;
       }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfiler.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfiler.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfiler.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfiler.java Wed Apr 17 07:29:38 2013
@@ -17,18 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.profiler;
 
-import java.lang.System;
-import java.util.LinkedList;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Iterator;
 import java.util.Collection;
-import java.util.List;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.OperatorHook;
 import org.apache.hadoop.hive.ql.exec.OperatorHookContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -37,41 +33,40 @@ public class HiveProfiler implements Ope
   private final Log LOG = LogFactory.getLog(this.getClass().getName());
   private static final HiveProfilePublisher pub = new HiveProfilePublisher();
 
-  private LinkedList<HiveProfilerEntry> operatorCallStack =
-    new LinkedList<HiveProfilerEntry>();
+  private final Map<String, HiveProfilerEntry> operatorCallStack =
+    new ConcurrentHashMap<String, HiveProfilerEntry>();
 
   // Aggregates stats for each operator in memory so that stats are written to DB
   // all at once - this allows the profiler to be extremely lightweight in
   // communication with the DB
-  private Map<String, HiveProfilerStats> aggrStats =
-    new HashMap<String, HiveProfilerStats>();
+  private final Map<String, HiveProfilerStats> aggrStats =
+    new ConcurrentHashMap<String, HiveProfilerStats>();
 
   public void enter(OperatorHookContext opHookContext) throws HiveException {
+    String opLevelAnnoName = HiveProfilerUtils.getLevelAnnotatedName(opHookContext);
     HiveProfilerEntry curEntry = new HiveProfilerEntry(opHookContext);
-    operatorCallStack.addFirst(curEntry);
+    assert(operatorCallStack.get(opLevelAnnoName) == null);
+    operatorCallStack.put(opLevelAnnoName, curEntry);
   }
 
-  private void exit(HiveProfilerEntry curEntry, HiveProfilerEntry parentEntry) {
+  private void exit(HiveProfilerEntry curEntry) {
     OperatorHookContext opHookContext = curEntry.getOperatorHookContext();
-
     // update the metrics we are
     long exitTime = System.nanoTime();
     long wallTime = exitTime - curEntry.wallStartTime;
 
     String opName = opHookContext.getOperatorName();
 
-    OperatorHookContext parentContext =
-      parentEntry != null ? parentEntry.getOperatorHookContext() :
-        null;
     Configuration conf = opHookContext.getOperator().getConfiguration();
 
-    String opId = opHookContext.getOperatorId();
-    if (aggrStats.containsKey(opId)) {
-      aggrStats.get(opId).updateStats(wallTime, 1);
+    String opLevelAnnoName = HiveProfilerUtils.getLevelAnnotatedName(opHookContext);
+
+    if (aggrStats.containsKey(opLevelAnnoName)) {
+      aggrStats.get(opLevelAnnoName).updateStats(wallTime, 1);
     } else {
       HiveProfilerStats stats =
-        new HiveProfilerStats(opHookContext, parentContext, 1, wallTime, conf);
-      aggrStats.put(opId, stats);
+        new HiveProfilerStats(opHookContext, 1, wallTime, conf);
+      aggrStats.put(opLevelAnnoName, stats);
     }
 
   }
@@ -79,16 +74,17 @@ public class HiveProfiler implements Ope
     if (operatorCallStack.isEmpty()) {
       LOG.error("Unexpected state: Operator Call Stack is empty on exit.");
     }
+    String opLevelAnnoName = HiveProfilerUtils.getLevelAnnotatedName(opHookContext);
+
+    HiveProfilerEntry curEntry = operatorCallStack.get(opLevelAnnoName);
 
-    // grab the top item on the call stack since that should be
-    // the first operator to exit.
-    HiveProfilerEntry curEntry = operatorCallStack.poll();
     if (!curEntry.getOperatorHookContext().equals(opHookContext)) {
       LOG.error("Expected to exit from: " + curEntry.getOperatorHookContext().toString() +
         " but exit called on " + opHookContext.toString());
     }
-    HiveProfilerEntry parentEntry = operatorCallStack.peekFirst();
-    exit(curEntry, parentEntry);
+
+    exit(curEntry);
+    operatorCallStack.remove(opLevelAnnoName);
   }
 
   public void close(OperatorHookContext opHookContext) {

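The bookkeeping change above replaces a LIFO operator call stack with a ConcurrentHashMap keyed by the level-annotated operator name, so an exit is matched to its enter by key rather than by stack position. A minimal sketch of that pattern, independent of the profiler classes (TimingTracker and its members are illustrative names):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class TimingTracker {
      private static final class Entry {
        final long startNanos = System.nanoTime();
      }

      private final Map<String, Entry> inFlight =
          new ConcurrentHashMap<String, Entry>();
      // key -> {total wall nanos, call count}; per-key updates are assumed to be
      // single-threaded, matching the containsKey/get pattern in the code above.
      private final Map<String, long[]> aggregates =
          new ConcurrentHashMap<String, long[]>();

      void enter(String opLevelAnnoName) {
        inFlight.put(opLevelAnnoName, new Entry());
      }

      void exit(String opLevelAnnoName) {
        Entry e = inFlight.remove(opLevelAnnoName);
        if (e == null) {
          return; // unmatched exit; the profiler logs this case instead
        }
        long wallTime = System.nanoTime() - e.startNanos;
        long[] agg = aggregates.get(opLevelAnnoName);
        if (agg == null) {
          aggregates.put(opLevelAnnoName, new long[] {wallTime, 1});
        } else {
          agg[0] += wallTime;
          agg[1]++;
        }
      }
    }
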
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStats.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStats.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStats.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStats.java Wed Apr 17 07:29:38 2013
@@ -21,6 +21,7 @@ import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorHookContext;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
@@ -50,7 +51,7 @@ public class HiveProfilerStats {
     Columns.CALL_COUNT
   };
 
-  private Map<String, String> stats = new HashMap<String, String>();
+  private final Map<String, String> stats = new HashMap<String, String>();
 
   long callCount;
   long inclTime;
@@ -58,16 +59,14 @@ public class HiveProfilerStats {
 
   protected HiveProfilerStats(
     OperatorHookContext opHookContext,
-    OperatorHookContext parentOpHookContext,
     long callCount, long wallTime, Configuration conf) {
     this.callCount = callCount;
     this.inclTime = wallTime;
     this.taskId = Utilities.getTaskId(conf);
-    populateStatsMap(opHookContext, parentOpHookContext, conf);
+    populateStatsMap(opHookContext, conf);
   }
 
   private void populateStatsMap(OperatorHookContext opHookContext,
-    OperatorHookContext parentOpHookContext,
     Configuration conf) {
     String queryId =
       conf == null ? "no conf" : HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
@@ -78,17 +77,14 @@ public class HiveProfilerStats {
     stats.put(
       Columns.OPERATOR_ID, opHookContext.getOperatorId());
 
-    String parentOpName = parentOpHookContext == null ? "" : parentOpHookContext.getOperatorName();
+    Operator parent = opHookContext.getParentOperator();
+    String parentOpName = parent == null ? "" : parent.getName();
     stats.put(Columns.PARENT_OPERATOR_NAME, parentOpName);
 
-
-    String parentOpId = parentOpHookContext == null ? "-1" : parentOpHookContext.getOperatorId();
+    String parentOpId = parent == null ? "-1" : parent.getIdentifier();
     stats.put(Columns.PARENT_OPERATOR_ID, parentOpId);
 
-    String levelAnnoOpName = opName + "_" + opHookContext.getOperatorId();
-    String levelAnnoName = parentOpHookContext == null ? "main() ==> " + levelAnnoOpName :
-      parentOpName + "_" +  parentOpId + " ==> " + levelAnnoOpName;
-    stats.put(Columns.LEVEL_ANNO_NAME, levelAnnoName);
+    stats.put(Columns.LEVEL_ANNO_NAME, HiveProfilerUtils.getLevelAnnotatedName(opHookContext));
 
   }
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStatsAggregator.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStatsAggregator.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerStatsAggregator.java Wed Apr 17 07:29:38 2013
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.profil
 import java.util.HashMap;
 import java.util.Map;
 import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 
@@ -58,7 +57,7 @@ public class HiveProfilerStatsAggregator
   }
 
 
-  private void populateAggregateStats(HiveConf conf) {
+  private void populateAggregateStats(HiveConf conf) throws SQLException {
     int waitWindow = rawProfileConnInfo.getWaitWindow();
     int maxRetries = rawProfileConnInfo.getMaxRetries();
 
@@ -83,9 +82,10 @@ public class HiveProfilerStatsAggregator
 
       populateAggregateStats(result);
       getProfileStatsStmt.close();
-      rawProfileConnInfo.getConnection().close();
     } catch(Exception e) {
       LOG.error("executing error: ", e);
+    } finally {
+      HiveProfilerUtils.closeConnection(rawProfileConnInfo);
     }
   }
 
@@ -110,7 +110,7 @@ public class HiveProfilerStatsAggregator
           stats.put(levelAnnoName, curStat);
         }
       }
-    } catch (SQLException e) {
+    } catch (Exception e) {
       LOG.error("Error Aggregating Stats", e);
     }
   }

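Moving the close into a finally block means the raw-profile connection is released even when the statement fails; previously the close() call was only reached on the success path. A JDK-only sketch of the pattern (class and method names here are illustrative):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    class QueryWithCleanup {
      static void queryAndAlwaysClose(Connection conn, String sql) {
        try {
          PreparedStatement stmt = conn.prepareStatement(sql);
          ResultSet result = stmt.executeQuery();
          while (result.next()) {
            // consume rows
          }
          stmt.close();
        } catch (Exception e) {
          System.err.println("executing error: " + e);
        } finally {
          // Runs on both the success and the error path, so the
          // connection cannot leak.
          try { conn.close(); } catch (Exception ignored) { }
        }
      }
    }
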
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerUtils.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerUtils.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/profiler/HiveProfilerUtils.java Wed Apr 17 07:29:38 2013
@@ -18,14 +18,15 @@
 package org.apache.hadoop.hive.ql.profiler;
 
 import java.sql.Connection;
-import java.sql.Statement;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.MapOperator;
+import org.apache.hadoop.hive.ql.exec.OperatorHookContext;
 
 public class HiveProfilerUtils {
   public static void createTableIfNonExistent(HiveProfilerConnectionInfo info,
@@ -57,4 +58,16 @@ public class HiveProfilerUtils {
     }
     return true;
   }
+
+  public static String getLevelAnnotatedName(OperatorHookContext opHookContext) {
+    Operator parent = opHookContext.getParentOperator();
+    if (parent != null && parent instanceof MapOperator) {
+      parent = null;
+    }
+    Operator op = opHookContext.getOperator();
+    String parentOpName = parent == null ? "" : parent.getName();
+    String parentOpId = parent == null ? "main()" : parent.getOperatorId();
+    String levelAnnoName = parentOpId + " ==> " + op.getOperatorId();
+    return levelAnnoName;
+  }
 }

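So the profiler's map key has the shape "<parent operator id> ==> <operator id>", with "main()" standing in when there is no parent (a MapOperator parent is treated as none). A tiny illustration with hard-coded, hypothetical operator IDs:

    public class LevelAnnotatedNameDemo {
      public static void main(String[] args) {
        // Root operator: parent is null, or a MapOperator treated as null.
        System.out.println("main()" + " ==> " + "TS_0");
        // Child operator GBY_3 whose parent is SEL_2.
        System.out.println("SEL_2" + " ==> " + "GBY_3");
      }
    }
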
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAbs.java Wed Apr 17 07:29:38 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -39,7 +39,7 @@ public class UDFAbs extends UDF {
   private final DoubleWritable resultDouble = new DoubleWritable();
   private final LongWritable resultLong = new LongWritable();
   private final IntWritable resultInt = new IntWritable();
-  private final BigDecimalWritable resultBigDecimal = new BigDecimalWritable();
+  private final HiveDecimalWritable resultHiveDecimal = new HiveDecimalWritable();
 
   public DoubleWritable evaluate(DoubleWritable n) {
     if (n == null) {
@@ -71,12 +71,12 @@ public class UDFAbs extends UDF {
     return resultInt;
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable n) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable n) {
     if (n == null) {
       return null;
     }
 
-    resultBigDecimal.set(n.getBigDecimal().abs());
-    return resultBigDecimal;
+    resultHiveDecimal.set(n.getHiveDecimal().abs());
+    return resultHiveDecimal;
   }
 }

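The remaining UDF hunks in this commit follow the same recipe as UDFAbs: swap BigDecimalWritable for HiveDecimalWritable, getBigDecimal() for getHiveDecimal(), and keep the reused result writable. The catch (NumberFormatException) blocks added to the arithmetic UDFs below presumably map precision overflows inside HiveDecimal to SQL NULL. A minimal sketch of the call pattern; the HiveDecimal(String) constructor used to build the input is an assumption about this branch's API, included only to make the example self-contained:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalAbsDemo {
      public static void main(String[] args) {
        // Assumed constructor; the branch's API for building a HiveDecimal
        // from a literal may differ.
        HiveDecimal input = new HiveDecimal("-12.5");
        HiveDecimalWritable result = new HiveDecimalWritable();
        result.set(input.abs()); // same call shape as UDFAbs.evaluate above
        System.out.println(result.getHiveDecimal());
      }
    }
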
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java Wed Apr 17 07:29:38 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.NumericOpMethodResolver;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -49,7 +49,7 @@ public abstract class UDFBaseNumericOp e
   protected LongWritable longWritable = new LongWritable();
   protected FloatWritable floatWritable = new FloatWritable();
   protected DoubleWritable doubleWritable = new DoubleWritable();
-  protected BigDecimalWritable bigDecimalWritable = new BigDecimalWritable();
+  protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
 
   public abstract ByteWritable evaluate(ByteWritable a, ByteWritable b);
 
@@ -63,5 +63,5 @@ public abstract class UDFBaseNumericOp e
 
   public abstract DoubleWritable evaluate(DoubleWritable a, DoubleWritable b);
 
-  public abstract BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b);
+  public abstract HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b);
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericUnaryOp.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -46,7 +46,7 @@ public abstract class UDFBaseNumericUnar
   protected LongWritable longWritable = new LongWritable();
   protected FloatWritable floatWritable = new FloatWritable();
   protected DoubleWritable doubleWritable = new DoubleWritable();
-  protected BigDecimalWritable bigDecimalWritable = new BigDecimalWritable();
+  protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
 
   public abstract ByteWritable evaluate(ByteWritable a);
 
@@ -60,5 +60,5 @@ public abstract class UDFBaseNumericUnar
 
   public abstract DoubleWritable evaluate(DoubleWritable a);
 
-  public abstract BigDecimalWritable evaluate(BigDecimalWritable a);
+  public abstract HiveDecimalWritable evaluate(HiveDecimalWritable a);
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java Wed Apr 17 07:29:38 2013
@@ -18,14 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
-import java.math.MathContext;
-import java.math.RoundingMode;
-
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.LongWritable;
 
 /**
@@ -40,7 +37,7 @@ import org.apache.hadoop.io.LongWritable
     + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
 public class UDFCeil extends UDF {
   private final LongWritable longWritable = new LongWritable();
-  private final BigDecimalWritable bigDecimalWritable = new BigDecimalWritable();
+  private final HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
 
   public UDFCeil() {
   }
@@ -54,14 +51,14 @@ public class UDFCeil extends UDF {
     }
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable i) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      BigDecimal bd = i.getBigDecimal();
+      HiveDecimal bd = i.getHiveDecimal();
       int origScale = bd.scale();
-      bigDecimalWritable.set(bd.setScale(0, BigDecimal.ROUND_CEILING).setScale(origScale));
-      return bigDecimalWritable;
+      decimalWritable.set(bd.setScale(0, HiveDecimal.ROUND_CEILING).setScale(origScale));
+      return decimalWritable;
     }
   }
 }

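The two-step setScale in UDFCeil is worth a worked example: the first setScale applies the ceiling at scale 0, the second restores the input's original scale so the result keeps the same number of fraction digits. The old code did exactly this with java.math.BigDecimal, so a JDK-only check shows the arithmetic:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class CeilScaleDemo {
      public static void main(String[] args) {
        BigDecimal bd = new BigDecimal("3.14");
        int origScale = bd.scale(); // 2
        // Ceiling to an integer value, then restore the original scale,
        // mirroring the two setScale calls in UDFCeil.evaluate above.
        System.out.println(bd.setScale(0, RoundingMode.CEILING).setScale(origScale)); // 4.00
      }
    }
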
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java Wed Apr 17 07:29:38 2013
@@ -18,14 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
-import java.math.MathContext;
-import java.math.RoundingMode;
-
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.LongWritable;
 
 /**
@@ -40,7 +37,7 @@ import org.apache.hadoop.io.LongWritable
     + "  > SELECT _FUNC_(5) FROM src LIMIT 1;\n" + "  5")
 public class UDFFloor extends UDF {
   private final LongWritable result = new LongWritable();
-  private final BigDecimalWritable bdResult = new BigDecimalWritable();
+  private final HiveDecimalWritable bdResult = new HiveDecimalWritable();
 
   public UDFFloor() {
   }
@@ -54,13 +51,13 @@ public class UDFFloor extends UDF {
     }
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable i) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      BigDecimal bd = i.getBigDecimal();
+      HiveDecimal bd = i.getHiveDecimal();
       int origScale = bd.scale();
-      bdResult.set(bd.setScale(0, BigDecimal.ROUND_FLOOR).setScale(origScale));
+      bdResult.set(bd.setScale(0, HiveDecimal.ROUND_FLOOR).setScale(origScale));
       return bdResult;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java Wed Apr 17 07:29:38 2013
@@ -18,13 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
-import java.math.RoundingMode;
-
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * UDFOPDivide.
@@ -38,10 +36,9 @@ import org.apache.hadoop.hive.serde2.io.
  * return a double for divide.
  */
 public class UDFOPDivide extends UDF {
-  private final DoubleWritable doubleWritable = new DoubleWritable();
-  private final BigDecimalWritable bigDecimalWritable = new BigDecimalWritable();
 
-  private final int MAX_SCALE = 65; // max compatible with MySQL
+  private final DoubleWritable doubleWritable = new DoubleWritable();
+  private final HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
 
   public DoubleWritable evaluate(DoubleWritable a, DoubleWritable b) {
     // LOG.info("Get input " + a.getClass() + ":" + a + " " + b.getClass() + ":"
@@ -54,17 +51,17 @@ public class UDFOPDivide extends UDF {
     return doubleWritable;
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
     if ((a == null) || (b == null)) {
       return null;
     }
-    if (b.getBigDecimal().compareTo(BigDecimal.ZERO) == 0) {
+    if (b.getHiveDecimal().compareTo(HiveDecimal.ZERO) == 0) {
       return null;
     } else {
-        bigDecimalWritable.set(a.getBigDecimal().divide(
-          b.getBigDecimal(), MAX_SCALE, RoundingMode.HALF_UP));
+        decimalWritable.set(a.getHiveDecimal().divide(
+          b.getHiveDecimal()));
     }
 
-    return bigDecimalWritable;
+    return decimalWritable;
   }
 }

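The divide hunk drops the explicit MAX_SCALE/HALF_UP arguments and delegates scale and rounding to HiveDecimal.divide, which presumably handles them internally; divide-by-zero still returns NULL. The explicit arguments were needed with raw java.math.BigDecimal, where an unbounded-scale divide throws on non-terminating expansions. A JDK-only illustration of why:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class DivideScaleDemo {
      public static void main(String[] args) {
        BigDecimal a = BigDecimal.ONE;
        BigDecimal b = new BigDecimal(3);
        try {
          a.divide(b); // 1/3 has no terminating decimal expansion
        } catch (ArithmeticException e) {
          System.out.println("unbounded divide: " + e.getMessage());
        }
        // The old UDFOPDivide behavior: pin the scale and rounding explicitly.
        System.out.println(a.divide(b, 65, RoundingMode.HALF_UP));
      }
    }
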
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java Wed Apr 17 07:29:38 2013
@@ -19,9 +19,9 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -110,13 +110,18 @@ public class UDFOPMinus extends UDFBaseN
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
 
     if ((a == null) || (b == null)) {
       return null;
     }
 
-    bigDecimalWritable.set(a.getBigDecimal().subtract(b.getBigDecimal()));
-    return bigDecimalWritable;
+    try {
+      decimalWritable.set(a.getHiveDecimal().subtract(b.getHiveDecimal()));
+    } catch (NumberFormatException e) {
+      return null;
+    }
+
+    return decimalWritable;
   }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java Wed Apr 17 07:29:38 2013
@@ -18,12 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
-
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -112,19 +111,24 @@ public class UDFOPMod extends UDFBaseNum
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
     if ((a == null) || (b == null)) {
       return null;
     }
 
-    BigDecimal av = a.getBigDecimal();
-    BigDecimal bv = b.getBigDecimal();
+    HiveDecimal av = a.getHiveDecimal();
+    HiveDecimal bv = b.getHiveDecimal();
+
+    if (bv.compareTo(HiveDecimal.ZERO) == 0) {
+      return null;
+    }
 
-    if (bv.compareTo(BigDecimal.ZERO) == 0) {
+    try {
+      decimalWritable.set(av.remainder(bv));
+    } catch(NumberFormatException e) {
       return null;
     }
 
-    bigDecimalWritable.set(av.remainder(bv));
-    return bigDecimalWritable;
+    return decimalWritable;
   }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java Wed Apr 17 07:29:38 2013
@@ -19,9 +19,9 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -110,12 +110,17 @@ public class UDFOPMultiply extends UDFBa
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
     if ((a == null) || (b == null)) {
       return null;
     }
 
-    bigDecimalWritable.set(a.getBigDecimal().multiply(b.getBigDecimal()));
-    return bigDecimalWritable;
+    try {
+      decimalWritable.set(a.getHiveDecimal().multiply(b.getHiveDecimal()));
+    } catch (NumberFormatException e) {
+      return null;
+    }
+
+    return decimalWritable;
   }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -92,12 +92,12 @@ public class UDFOPNegative extends UDFBa
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a) {
     if (a == null) {
       return null;
     }
-    bigDecimalWritable.set(a.getBigDecimal().negate());
-    return bigDecimalWritable;
+    decimalWritable.set(a.getHiveDecimal().negate());
+    return decimalWritable;
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java Wed Apr 17 07:29:38 2013
@@ -19,9 +19,9 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -115,13 +115,17 @@ public class UDFOPPlus extends UDFBaseNu
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
     if ((a == null) || (b == null)) {
       return null;
     }
 
-    bigDecimalWritable.set(a.getBigDecimal().add(b.getBigDecimal()));
-    return bigDecimalWritable;
+    try {
+      decimalWritable.set(a.getHiveDecimal().add(b.getHiveDecimal()));
+    } catch(NumberFormatException e) {
+      return null;
+    }
+    return decimalWritable;
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -68,7 +68,7 @@ public class UDFOPPositive extends UDFBa
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a) {
     return a;
   }
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java Wed Apr 17 07:29:38 2013
@@ -18,12 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
-
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -113,19 +112,24 @@ public class UDFPosMod extends UDFBaseNu
   }
 
   @Override
-  public BigDecimalWritable evaluate(BigDecimalWritable a, BigDecimalWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, HiveDecimalWritable b) {
     if ((a == null) || (b == null)) {
       return null;
     }
 
-    BigDecimal av = a.getBigDecimal();
-    BigDecimal bv = b.getBigDecimal();
+    HiveDecimal av = a.getHiveDecimal();
+    HiveDecimal bv = b.getHiveDecimal();
+
+    if (bv.compareTo(HiveDecimal.ZERO) == 0) {
+      return null;
+    }
 
-    if (bv.compareTo(BigDecimal.ZERO) == 0) {
+    try {
+      decimalWritable.set(av.remainder(bv).add(bv).remainder(bv));
+    } catch (NumberFormatException e) {
       return null;
     }
 
-    bigDecimalWritable.set(av.remainder(bv).add(bv).remainder(bv));
-    return bigDecimalWritable;
+    return decimalWritable;
   }
 }

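The posmod expression av.remainder(bv).add(bv).remainder(bv) is the standard trick for a non-negative modulus: remainder alone takes the sign of the dividend, so adding the divisor back and taking the remainder again lands the result in [0, |b|). A JDK-only worked example:

    import java.math.BigDecimal;

    public class PosModDemo {
      public static void main(String[] args) {
        BigDecimal a = new BigDecimal(-7);
        BigDecimal b = new BigDecimal(3);
        BigDecimal rem = a.remainder(b);          // -1: sign follows the dividend
        BigDecimal pos = rem.add(b).remainder(b); //  2: always in [0, |b|)
        System.out.println(rem + " -> " + pos);
      }
    }
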
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPower.java Wed Apr 17 07:29:38 2013
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.IntWritable;
 
 /**
@@ -34,7 +34,7 @@ import org.apache.hadoop.io.IntWritable;
     + "  > SELECT _FUNC_(2, 3) FROM src LIMIT 1;\n" + "  8")
 public class UDFPower extends UDF {
   private final DoubleWritable resultDouble = new DoubleWritable();
-  private final BigDecimalWritable resultBigDecimal = new BigDecimalWritable();
+  private final HiveDecimalWritable resultHiveDecimal = new HiveDecimalWritable();
 
   public UDFPower() {
   }
@@ -50,7 +50,7 @@ public class UDFPower extends UDF {
       return resultDouble;
     }
   }
-  
+
   /**
    * Raise a to the power of b.
    */
@@ -62,16 +62,20 @@ public class UDFPower extends UDF {
       return resultDouble;
     }
   }
-  
+
   /**
    * Raise a to the power of b
    */
-  public BigDecimalWritable evaluate(BigDecimalWritable a, IntWritable b) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable a, IntWritable b) {
     if (a == null || b == null) {
       return null;
     } else {
-      resultBigDecimal.set(a.getBigDecimal().pow(b.get()));
-      return resultBigDecimal;
+      try {
+        resultHiveDecimal.set(a.getHiveDecimal().pow(b.get()));
+      } catch (NumberFormatException e) {
+        return null;
+      }
+      return resultHiveDecimal;
     }
   }
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java Wed Apr 17 07:29:38 2013
@@ -21,11 +21,12 @@ package org.apache.hadoop.hive.ql.udf;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -39,7 +40,7 @@ import org.apache.hadoop.io.LongWritable
     extended = "Example:\n"
     + "  > SELECT _FUNC_(12.3456, 1) FROM src LIMIT 1;\n" + "  12.3'")
 public class UDFRound extends UDF {
-  private final BigDecimalWritable bigDecimalWritable = new BigDecimalWritable();
+  private final HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
   private final DoubleWritable doubleWritable = new DoubleWritable();
   private final LongWritable longWritable = new LongWritable();
   private final IntWritable intWritable = new IntWritable();
@@ -74,21 +75,25 @@ public class UDFRound extends UDF {
     return evaluate(n, i.get());
   }
 
-  private BigDecimalWritable evaluate(BigDecimalWritable n, int i) {
+  private HiveDecimalWritable evaluate(HiveDecimalWritable n, int i) {
     if (n == null) {
       return null;
     }
-    BigDecimal bd = n.getBigDecimal();
-    bd = n.getBigDecimal().setScale(i, RoundingMode.HALF_UP);
-    bigDecimalWritable.set(bd);
-    return bigDecimalWritable;
+    HiveDecimal bd = n.getHiveDecimal();
+    try {
+      bd = n.getHiveDecimal().setScale(i, HiveDecimal.ROUND_HALF_UP);
+    } catch (NumberFormatException e) {
+      return null;
+    }
+    decimalWritable.set(bd);
+    return decimalWritable;
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable n) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable n) {
     return evaluate(n, 0);
   }
 
-  public BigDecimalWritable evaluate(BigDecimalWritable n, IntWritable i) {
+  public HiveDecimalWritable evaluate(HiveDecimalWritable n, IntWritable i) {
     if (i == null) {
       return null;
     }

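The rounding itself is unchanged, just expressed on HiveDecimal: setScale at the requested digit count with half-up rounding, and NumberFormatException mapped to NULL. The old code used the identical BigDecimal calls, so a JDK-only check of the @Description example above:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class RoundDemo {
      public static void main(String[] args) {
        // Matches the _FUNC_(12.3456, 1) example in the @Description above.
        System.out.println(new BigDecimal("12.3456").setScale(1, RoundingMode.HALF_UP)); // 12.3
        System.out.println(new BigDecimal("2.5").setScale(0, RoundingMode.HALF_UP));     // 3
      }
    }
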
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Wed Apr 17 07:29:38 2013
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.math.BigDecimal;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -175,11 +175,11 @@ public class UDFToBoolean extends UDF {
     }
   }
 
-  public BooleanWritable evaluate(BigDecimalWritable i) {
+  public BooleanWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      booleanWritable.set(BigDecimal.ZERO.compareTo(i.getBigDecimal()) != 0);
+      booleanWritable.set(HiveDecimal.ZERO.compareTo(i.getHiveDecimal()) != 0);
       return booleanWritable;
     }
   }

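The cast UDFs that follow apply the same mechanical substitution. In UDFToBoolean the zero test deliberately stays compareTo-based, as it was with BigDecimal: decimal equals() is scale-sensitive, while compareTo ignores scale, and the same reasoning presumably carries over to HiveDecimal. A JDK-only illustration:

    import java.math.BigDecimal;

    public class ZeroCompareDemo {
      public static void main(String[] args) {
        BigDecimal zeroWithScale = new BigDecimal("0.00");
        System.out.println(BigDecimal.ZERO.equals(zeroWithScale));         // false: equals is scale-sensitive
        System.out.println(BigDecimal.ZERO.compareTo(zeroWithScale) == 0); // true: compareTo ignores scale
      }
    }
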
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -182,11 +182,11 @@ public class UDFToByte extends UDF {
     }
   }
 
-  public ByteWritable evaluate(BigDecimalWritable i) {
+  public ByteWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      byteWritable.set(i.getBigDecimal().byteValue());
+      byteWritable.set(i.getHiveDecimal().byteValue());
       return byteWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -184,11 +184,11 @@ public class UDFToDouble extends UDF {
     }
   }
 
-  public DoubleWritable evaluate(BigDecimalWritable i) {
+  public DoubleWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      doubleWritable.set(i.getBigDecimal().doubleValue());
+      doubleWritable.set(i.getHiveDecimal().doubleValue());
       return doubleWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -185,11 +185,11 @@ public class UDFToFloat extends UDF {
     }
   }
 
-  public FloatWritable evaluate(BigDecimalWritable i) {
+  public FloatWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      floatWritable.set(i.getBigDecimal().floatValue());
+      floatWritable.set(i.getHiveDecimal().floatValue());
       return floatWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -189,11 +189,11 @@ public class UDFToInteger extends UDF {
     }
   }
 
-  public IntWritable evaluate(BigDecimalWritable i) {
+  public IntWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      intWritable.set(i.getBigDecimal().intValue());
+      intWritable.set(i.getHiveDecimal().intValue());
       return intWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -193,11 +193,11 @@ public class UDFToLong extends UDF {
     }
   }
 
-  public LongWritable evaluate(BigDecimalWritable i) {
+  public LongWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set(i.getBigDecimal().longValue());
+      longWritable.set(i.getHiveDecimal().longValue());
       return longWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java Wed Apr 17 07:29:38 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -182,11 +182,11 @@ public class UDFToShort extends UDF {
     }
   }
 
-  public ShortWritable evaluate(BigDecimalWritable i) {
+  public ShortWritable evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set(i.getBigDecimal().shortValue());
+      shortWritable.set(i.getHiveDecimal().shortValue());
       return shortWritable;
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java Wed Apr 17 07:29:38 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -142,7 +142,7 @@ public class UDFToString extends UDF {
     }
   }
 
-  public Text evaluate(BigDecimalWritable i) {
+  public Text evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;
     } else {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java Wed Apr 17 07:29:38 2013
@@ -21,17 +21,21 @@ import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
@@ -71,7 +75,9 @@ public class GenericUDAFAverage extends 
     case DOUBLE:
     case STRING:
     case TIMESTAMP:
-      return new GenericUDAFAverageEvaluator();
+      return new GenericUDAFAverageEvaluatorDouble();
+    case DECIMAL:
+      return new GenericUDAFAverageEvaluatorDecimal();
     case BOOLEAN:
     default:
       throw new UDFArgumentTypeException(0,
@@ -80,27 +86,183 @@ public class GenericUDAFAverage extends 
     }
   }
 
-  /**
-   * GenericUDAFAverageEvaluator.
-   *
-   */
-  public static class GenericUDAFAverageEvaluator extends GenericUDAFEvaluator {
 
-    // For PARTIAL1 and COMPLETE
-    PrimitiveObjectInspector inputOI;
+  public static class GenericUDAFAverageEvaluatorDouble extends AbstractGenericUDAFAverageEvaluator<Double> {
 
-    // For PARTIAL2 and FINAL
-    StructObjectInspector soi;
-    StructField countField;
-    StructField sumField;
-    LongObjectInspector countFieldOI;
-    DoubleObjectInspector sumFieldOI;
+    @Override
+    public void doReset(AverageAggregationBuffer<Double> aggregation) throws HiveException {
+      aggregation.count = 0;
+      aggregation.sum = new Double(0);
+    }
+
+    @Override
+    protected ObjectInspector getSumFieldJavaObjectInspector() {
+      return PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
+    }
+    @Override
+    protected ObjectInspector getSumFieldWritableObjectInspector() {
+      return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+    }
+
+    @Override
+    protected void doIterate(AverageAggregationBuffer<Double> aggregation,
+        PrimitiveObjectInspector oi, Object parameter) {
+      double value = PrimitiveObjectInspectorUtils.getDouble(parameter, oi);
+      aggregation.count++;
+      aggregation.sum += value;
+
+    }
 
+    @Override
+    protected void doMerge(AverageAggregationBuffer<Double> aggregation, Long partialCount,
+        ObjectInspector sumFieldOI, Object partialSum) {
+      double value = ((DoubleObjectInspector)sumFieldOI).get(partialSum);
+      aggregation.count += partialCount;
+      aggregation.sum += value;
+    }
+
+    @Override
+    protected void doTerminatePartial(AverageAggregationBuffer<Double> aggregation) {
+      if(partialResult[1] == null) {
+        partialResult[1] = new DoubleWritable(0);
+      }
+      ((LongWritable) partialResult[0]).set(aggregation.count);
+      ((DoubleWritable) partialResult[1]).set(aggregation.sum);
+    }
+
+    @Override
+    protected Object doTerminate(AverageAggregationBuffer<Double> aggregation) {
+      if(aggregation.count == 0) {
+        return null;
+      } else {
+        DoubleWritable result = new DoubleWritable(0);
+        result.set(aggregation.sum / aggregation.count);
+        return result;
+      }
+    }
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      AverageAggregationBuffer<Double> result = new AverageAggregationBuffer<Double>();
+      reset(result);
+      return result;
+    }
+  }
+
+  public static class GenericUDAFAverageEvaluatorDecimal extends AbstractGenericUDAFAverageEvaluator<HiveDecimal> {
+
+    @Override
+    public void doReset(AverageAggregationBuffer<HiveDecimal> aggregation) throws HiveException {
+      aggregation.count = 0;
+      aggregation.sum = HiveDecimal.ZERO;
+    }
+
+    @Override
+    protected ObjectInspector getSumFieldJavaObjectInspector() {
+      return PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector;
+    }
+    @Override
+    protected ObjectInspector getSumFieldWritableObjectInspector() {
+      return PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
+    }
+
+    @Override
+    protected void doIterate(AverageAggregationBuffer<HiveDecimal> aggregation,
+        PrimitiveObjectInspector oi, Object parameter) {
+      HiveDecimal value = PrimitiveObjectInspectorUtils.getHiveDecimal(parameter, oi);
+      aggregation.count++;
+      if (aggregation.sum != null) {
+        try {
+          aggregation.sum = aggregation.sum.add(value);
+        } catch (NumberFormatException e) {
+          aggregation.sum = null;
+        }
+      }
+    }
+
+    @Override
+    protected void doMerge(AverageAggregationBuffer<HiveDecimal> aggregation, Long partialCount,
+        ObjectInspector sumFieldOI, Object partialSum) {
+      HiveDecimal value = ((HiveDecimalObjectInspector)sumFieldOI).getPrimitiveJavaObject(partialSum);
+      if (value == null) {
+        aggregation.sum = null;
+      }
+      aggregation.count += partialCount;
+      if (aggregation.sum != null) {
+        try {
+          aggregation.sum = aggregation.sum.add(value);
+        } catch (NumberFormatException e) {
+          aggregation.sum = null;
+        }
+      }
+    }
+
+    @Override
+    protected void doTerminatePartial(AverageAggregationBuffer<HiveDecimal> aggregation) {
+      if(partialResult[1] == null && aggregation.sum != null) {
+        partialResult[1] = new HiveDecimalWritable(HiveDecimal.ZERO);
+      }
+      ((LongWritable) partialResult[0]).set(aggregation.count);
+      if (aggregation.sum != null) {
+        ((HiveDecimalWritable) partialResult[1]).set(aggregation.sum);
+      } else {
+        partialResult[1] = null;
+      }
+    }
+
+    @Override
+    protected Object doTerminate(AverageAggregationBuffer<HiveDecimal> aggregation) {
+      if(aggregation.count == 0 || aggregation.sum == null) {
+        return null;
+      } else {
+        HiveDecimalWritable result = new HiveDecimalWritable(HiveDecimal.ZERO);
+        try {
+          result.set(aggregation.sum.divide(new HiveDecimal(aggregation.count)));
+        } catch (NumberFormatException e) {
+          result = null;
+        }
+        return result;
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      AverageAggregationBuffer<HiveDecimal> result = new AverageAggregationBuffer<HiveDecimal>();
+      reset(result);
+      return result;
+    }
+  }
+
+  private static class AverageAggregationBuffer<TYPE> implements AggregationBuffer {
+    private long count;
+    private TYPE sum;
+  };
+
+  @SuppressWarnings("unchecked")
+  public static abstract class AbstractGenericUDAFAverageEvaluator<TYPE> extends GenericUDAFEvaluator {
+
+    // For PARTIAL1 and COMPLETE
+    private PrimitiveObjectInspector inputOI;
+    // For PARTIAL2 and FINAL
+    private StructObjectInspector soi;
+    private StructField countField;
+    private StructField sumField;
+    private LongObjectInspector countFieldOI;
+    private ObjectInspector sumFieldOI;
     // For PARTIAL1 and PARTIAL2
-    Object[] partialResult;
+    protected Object[] partialResult;
+
+    private boolean warned = false;
 
-    // For FINAL and COMPLETE
-    DoubleWritable result;
+
+    protected abstract ObjectInspector getSumFieldJavaObjectInspector();
+    protected abstract ObjectInspector getSumFieldWritableObjectInspector();
+    protected abstract void doIterate(AverageAggregationBuffer<TYPE> aggregation,
+        PrimitiveObjectInspector inputOI, Object parameter);
+    protected abstract void doMerge(AverageAggregationBuffer<TYPE> aggregation, Long partialCount,
+        ObjectInspector sumFieldOI, Object partialSum);
+    protected abstract void doTerminatePartial(AverageAggregationBuffer<TYPE> aggregation);
+    protected abstract Object doTerminate(AverageAggregationBuffer<TYPE> aggregation);
+    protected abstract void doReset(AverageAggregationBuffer<TYPE> aggregation) throws HiveException;
 
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
@@ -115,9 +277,8 @@ public class GenericUDAFAverage extends 
         soi = (StructObjectInspector) parameters[0];
         countField = soi.getStructFieldRef("count");
         sumField = soi.getStructFieldRef("sum");
-        countFieldOI = (LongObjectInspector) countField
-            .getFieldObjectInspector();
-        sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
+        countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+        sumFieldOI = sumField.getFieldObjectInspector();
       }
 
       // init output
@@ -127,96 +288,63 @@ public class GenericUDAFAverage extends 
 
         ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(getSumFieldWritableObjectInspector());
         ArrayList<String> fname = new ArrayList<String>();
         fname.add("count");
         fname.add("sum");
         partialResult = new Object[2];
         partialResult[0] = new LongWritable(0);
-        partialResult[1] = new DoubleWritable(0);
+        // index 1 is initialized by the subclass in doTerminatePartial()
         return ObjectInspectorFactory.getStandardStructObjectInspector(fname,
             foi);
 
       } else {
-        result = new DoubleWritable(0);
-        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+        return getSumFieldWritableObjectInspector();
       }
     }
 
-    static class AverageAgg implements AggregationBuffer {
-      long count;
-      double sum;
-    };
-
     @Override
-    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-      AverageAgg result = new AverageAgg();
-      reset(result);
-      return result;
-    }
-
-    @Override
-    public void reset(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg) agg;
-      myagg.count = 0;
-      myagg.sum = 0;
+    public void reset(AggregationBuffer aggregation) throws HiveException {
+      doReset((AverageAggregationBuffer<TYPE>)aggregation);
     }
 
-    boolean warned = false;
-
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
+    public void iterate(AggregationBuffer aggregation, Object[] parameters)
         throws HiveException {
       assert (parameters.length == 1);
-      Object p = parameters[0];
-      if (p != null) {
-        AverageAgg myagg = (AverageAgg) agg;
+      Object parameter = parameters[0];
+      if (parameter != null) {
+        AverageAggregationBuffer<TYPE> averageAggregation = (AverageAggregationBuffer<TYPE>) aggregation;
         try {
-          double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
-          myagg.count++;
-          myagg.sum += v;
+          doIterate(averageAggregation, inputOI, parameter);
         } catch (NumberFormatException e) {
           if (!warned) {
             warned = true;
-            LOG.warn(getClass().getSimpleName() + " "
-                + StringUtils.stringifyException(e));
-            LOG.warn(getClass().getSimpleName()
-                + " ignoring similar exceptions.");
+            LOG.warn("Ignoring similar exceptions: " + StringUtils.stringifyException(e));
           }
         }
       }
     }
 
     @Override
-    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg) agg;
-      ((LongWritable) partialResult[0]).set(myagg.count);
-      ((DoubleWritable) partialResult[1]).set(myagg.sum);
+    public Object terminatePartial(AggregationBuffer aggregation) throws HiveException {
+      doTerminatePartial((AverageAggregationBuffer<TYPE>) aggregation);
       return partialResult;
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial)
+    public void merge(AggregationBuffer aggregation, Object partial)
         throws HiveException {
       if (partial != null) {
-        AverageAgg myagg = (AverageAgg) agg;
-        Object partialCount = soi.getStructFieldData(partial, countField);
-        Object partialSum = soi.getStructFieldData(partial, sumField);
-        myagg.count += countFieldOI.get(partialCount);
-        myagg.sum += sumFieldOI.get(partialSum);
+        doMerge((AverageAggregationBuffer<TYPE>)aggregation,
+            countFieldOI.get(soi.getStructFieldData(partial, countField)),
+            sumFieldOI, soi.getStructFieldData(partial, sumField));
       }
     }
 
     @Override
-    public Object terminate(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg) agg;
-      if (myagg.count == 0) {
-        return null;
-      } else {
-        result.set(myagg.sum / myagg.count);
-        return result;
-      }
+    public Object terminate(AggregationBuffer aggregation) throws HiveException {
+      return doTerminate((AverageAggregationBuffer<TYPE>)aggregation);
     }
   }
-
 }
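
Net effect of the refactor above: the old monolithic evaluator becomes
AbstractGenericUDAFAverageEvaluator, which keeps the mode handling and the
count/sum struct plumbing, while the Double and HiveDecimal subclasses supply
only the do* hooks. A hedged sketch of driving the decimal evaluator through
the COMPLETE lifecycle the way the operator tree would; it assumes the elided
PARTIAL1/COMPLETE branch of init() assigns inputOI from parameters[0] as
usual, and takes the HiveDecimal constructors and divide() from this diff's
own usage:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class AverageDecimalSketch {
      public static void main(String[] args) throws HiveException {
        GenericUDAFAverage.GenericUDAFAverageEvaluatorDecimal eval =
            new GenericUDAFAverage.GenericUDAFAverageEvaluatorDecimal();
        // COMPLETE mode: raw decimals in, final average out, no partials.
        ObjectInspector[] params =
            { PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector };
        eval.init(GenericUDAFEvaluator.Mode.COMPLETE, params);
        AggregationBuffer buf = eval.getNewAggregationBuffer();
        eval.iterate(buf, new Object[] { new HiveDecimalWritable(new HiveDecimal("1.5")) });
        eval.iterate(buf, new Object[] { new HiveDecimalWritable(new HiveDecimal("2.5")) });
        // terminate() delegates to doTerminate(): sum 4.0 divided by count 2.
        HiveDecimalWritable avg = (HiveDecimalWritable) eval.terminate(buf);
        System.out.println(avg.getHiveDecimal()); // 2
      }
    }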

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java Wed Apr 17 07:29:38 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -55,29 +56,49 @@ public class GenericUDAFCumeDist extends
 		return new GenericUDAFCumeDistEvaluator();
 	}
 
-	public static class GenericUDAFCumeDistEvaluator extends GenericUDAFRankEvaluator
-	{
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-		{
-			super.init(m, parameters);
-			return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			ArrayList<IntWritable> ranks =  ((RankBuffer) agg).rowNums;
-			double sz = ranks.size();
-			ArrayList<DoubleWritable> pranks = new ArrayList<DoubleWritable>(ranks.size());
-
-			for(IntWritable i : ranks)
-			{
-				double pr = ((double)i.get())/sz;
-				pranks.add(new DoubleWritable(pr));
-			}
-
-			return pranks;
-		}
-	}
+  public static class GenericUDAFCumeDistEvaluator extends GenericUDAFRankEvaluator
+  {
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
+    {
+      super.init(m, parameters);
+      return ObjectInspectorFactory
+          .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException
+    {
+      List<IntWritable> ranks = ((RankBuffer) agg).rowNums;
+      int ranksSize = ranks.size();
+      double ranksSizeDouble = ranksSize;
+      List<DoubleWritable> distances = new ArrayList<DoubleWritable>(ranksSize);
+      int last = -1;
+      int current = -1;
+      // tracks how many consecutive rows share the current rank
+      int elementsAtRank = 1;
+      for (int index = 0; index < ranksSize; index++) {
+        current = ranks.get(index).get();
+        if (index == 0) {
+          last = current;
+        } else if (last == current) {
+          elementsAtRank++;
+        } else {
+          last = current;
+          double distance = ((double) index) / ranksSizeDouble;
+          while (elementsAtRank-- > 0) {
+            distances.add(new DoubleWritable(distance));
+          }
+          elementsAtRank = 1;
+        }
+      }
+      if (ranksSize > 0 && last == current) {
+        double distance = ((double) ranksSize) / ranksSizeDouble;
+        while (elementsAtRank-- > 0) {
+          distances.add(new DoubleWritable(distance));
+        }
+      }
+      return distances;
+    }
+  }
 }
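
The rewritten terminate() no longer emits rank/size row by row: every row in a
group of tied ranks now receives the fraction for the last row of that group,
which is what CUME_DIST is defined to return (rows with value less than or
equal to the current value, over total rows). A standalone sketch of the same
loop over plain ints, not part of the commit; the final flush is simplified to
the 1.0 that ranksSize/ranksSizeDouble always yields:

    import java.util.ArrayList;
    import java.util.List;

    public class CumeDistSketch {
      // Same control flow as the new terminate(), minus the writables.
      static List<Double> cumeDist(int[] ranks) {
        int n = ranks.length;
        List<Double> out = new ArrayList<Double>(n);
        int last = -1, current = -1, elementsAtRank = 1;
        for (int index = 0; index < n; index++) {
          current = ranks[index];
          if (index == 0) {
            last = current;
          } else if (last == current) {
            elementsAtRank++;                       // another row tied at this rank
          } else {
            last = current;
            // index = cumulative rows through the tied group being flushed
            double distance = ((double) index) / n;
            while (elementsAtRank-- > 0) {
              out.add(distance);                    // the whole tied group shares it
            }
            elementsAtRank = 1;
          }
        }
        if (n > 0) {                                // final group: n / n == 1.0
          while (elementsAtRank-- > 0) {
            out.add(1.0);
          }
        }
        return out;
      }

      public static void main(String[] args) {
        // The old rank/size code produced [0.25, 0.25, 0.75, 1.0] here; the two
        // rows tied at rank 1 now correctly share 2/4.
        System.out.println(cumeDist(new int[] {1, 1, 3, 4})); // [0.5, 0.5, 0.75, 1.0]
      }
    }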

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java Wed Apr 17 07:29:38 2013
@@ -17,16 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.math.BigDecimal;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -70,7 +69,7 @@ public class GenericUDAFSum extends Abst
     case STRING:
       return new GenericUDAFSumDouble();
     case DECIMAL:
-      return new GenericUDAFSumBigDecimal();
+      return new GenericUDAFSumHiveDecimal();
     case BOOLEAN:
     default:
       throw new UDFArgumentTypeException(0,
@@ -80,40 +79,40 @@ public class GenericUDAFSum extends Abst
   }
 
   /**
-   * GenericUDAFSumBigDecimal.
+   * GenericUDAFSumHiveDecimal.
    *
    */
-  public static class GenericUDAFSumBigDecimal extends GenericUDAFEvaluator {
+  public static class GenericUDAFSumHiveDecimal extends GenericUDAFEvaluator {
     private PrimitiveObjectInspector inputOI;
-    private BigDecimalWritable result;
+    private HiveDecimalWritable result;
 
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       assert (parameters.length == 1);
       super.init(m, parameters);
-      result = new BigDecimalWritable(BigDecimal.ZERO);
+      result = new HiveDecimalWritable(HiveDecimal.ZERO);
       inputOI = (PrimitiveObjectInspector) parameters[0];
-      return PrimitiveObjectInspectorFactory.writableBigDecimalObjectInspector;
+      return PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
     }
 
     /** class for storing decimal sum value. */
-    static class SumBigDecimalAgg implements AggregationBuffer {
+    static class SumHiveDecimalAgg implements AggregationBuffer {
       boolean empty;
-      BigDecimal sum;
+      HiveDecimal sum;
     }
 
     @Override
     public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-      SumBigDecimalAgg agg = new SumBigDecimalAgg();
+      SumHiveDecimalAgg agg = new SumHiveDecimalAgg();
       reset(agg);
       return agg;
     }
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      SumBigDecimalAgg bdAgg = (SumBigDecimalAgg) agg;
+      SumHiveDecimalAgg bdAgg = (SumHiveDecimalAgg) agg;
       bdAgg.empty = true;
-      bdAgg.sum = BigDecimal.ZERO;
+      bdAgg.sum = HiveDecimal.ZERO;
     }
 
     boolean warned = false;
@@ -143,17 +142,26 @@ public class GenericUDAFSum extends Abst
     @Override
     public void merge(AggregationBuffer agg, Object partial) throws HiveException {
       if (partial != null) {
-        SumBigDecimalAgg myagg = (SumBigDecimalAgg) agg;
+        SumHiveDecimalAgg myagg = (SumHiveDecimalAgg) agg;
+        if (myagg.sum == null) {
+          return;
+        }
+
         myagg.empty = false;
-        myagg.sum = myagg.sum.add(
-            PrimitiveObjectInspectorUtils.getBigDecimal(partial, inputOI));
+
+        try {
+          myagg.sum = myagg.sum.add(
+            PrimitiveObjectInspectorUtils.getHiveDecimal(partial, inputOI));
+        } catch (NumberFormatException e) {
+          myagg.sum = null;
+        }
       }
     }
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      SumBigDecimalAgg myagg = (SumBigDecimalAgg) agg;
-      if (myagg.empty) {
+      SumHiveDecimalAgg myagg = (SumHiveDecimalAgg) agg;
+      if (myagg.empty || myagg.sum == null) {
         return null;
       }
       result.set(myagg.sum);
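
With the merge() and terminate() changes above, the decimal sum is sticky
null: once add() signals overflow via NumberFormatException, the group's sum
stays null and the query returns SQL NULL instead of a wrong number. A hedged
sketch of the FINAL-mode lifecycle, not part of the commit (HiveDecimal's
String constructor assumed, as before):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class SumDecimalSketch {
      public static void main(String[] args) throws HiveException {
        GenericUDAFSum.GenericUDAFSumHiveDecimal eval =
            new GenericUDAFSum.GenericUDAFSumHiveDecimal();
        // FINAL mode: partial decimal sums in, one decimal sum out.
        ObjectInspector[] params =
            { PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector };
        eval.init(GenericUDAFEvaluator.Mode.FINAL, params);
        AggregationBuffer buf = eval.getNewAggregationBuffer();
        eval.merge(buf, new HiveDecimalWritable(new HiveDecimal("10.25")));
        eval.merge(buf, null); // null partials are skipped, as before
        HiveDecimalWritable total = (HiveDecimalWritable) eval.terminate(buf);
        System.out.println(total.getHiveDecimal()); // 10.25
        // Had any add() thrown NumberFormatException, sum would have gone null
        // and terminate() would have returned null.
      }
    }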

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java Wed Apr 17 07:29:38 2013
@@ -20,16 +20,16 @@ package org.apache.hadoop.hive.ql.udf.ge
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.math.BigDecimal;
 import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -166,7 +166,7 @@ public class GenericUDFReflect2 extends 
         ((BytesWritable)returnObj).set((byte[])result, 0, ((byte[]) result).length);
         return returnObj;
       case DECIMAL:
-        ((BigDecimalWritable)returnObj).set((BigDecimal)result);
+        ((HiveDecimalWritable)returnObj).set((HiveDecimal)result);
         return returnObj;
     }
     throw new HiveException("Invalid type " + returnOI.getPrimitiveCategory());
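
For the DECIMAL branch above, the reflected call is now expected to hand back
a HiveDecimal rather than a java.math.BigDecimal. A minimal sketch of that
wrapping step, not part of the commit; wrap() is invented for illustration,
and only the marked line mirrors the new case:

    import java.lang.reflect.Method;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class Reflect2DecimalSketch {
      static HiveDecimalWritable wrap(Method m, Object target, Object... args)
          throws Exception {
        Object result = m.invoke(target, args);
        HiveDecimalWritable returnObj = new HiveDecimalWritable(HiveDecimal.ZERO);
        returnObj.set((HiveDecimal) result); // the cast this hunk changes
        return returnObj;
      }

      public static void main(String[] args) throws Exception {
        // HiveDecimal.add(HiveDecimal) is used elsewhere in this diff, so it
        // is a safe method to reflect on.
        Method add = HiveDecimal.class.getMethod("add", HiveDecimal.class);
        HiveDecimalWritable out =
            wrap(add, new HiveDecimal("1.25"), new HiveDecimal("2.75"));
        System.out.println(out.getHiveDecimal()); // the 1.25 + 2.75 sum
      }
    }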

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java?rev=1468783&r1=1468782&r2=1468783&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDecimal.java Wed Apr 17 07:29:38 2013
@@ -23,14 +23,14 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.BigDecimalConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveDecimalConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
 @Description(name = "decimal", value = "_FUNC_(a) - cast a to decimal")
 public class GenericUDFToDecimal extends GenericUDF {
 
   private PrimitiveObjectInspector argumentOI;
-  private BigDecimalConverter bdConverter;
+  private HiveDecimalConverter bdConverter;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
@@ -46,9 +46,9 @@ public class GenericUDFToDecimal extends
           "The function DECIMAL takes only primitive types");
     }
 
-    bdConverter = new BigDecimalConverter(argumentOI,
-        PrimitiveObjectInspectorFactory.writableBigDecimalObjectInspector);
-    return PrimitiveObjectInspectorFactory.writableBigDecimalObjectInspector;
+    bdConverter = new HiveDecimalConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector);
+    return PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector;
   }
 
   @Override