Posted to commits@hive.apache.org by gu...@apache.org on 2014/11/01 21:55:03 UTC

svn commit: r1636047 [2/12] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ itests/src/test/resources/ ql/src/gen/vectorization/ExpressionTemplates/ ql/src/gen/vectorization/UDAFTemplates/ ql/src/java/org/apache/hadoop/hive/ql/exe...

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java Sat Nov  1 20:55:00 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalToStringUnaryUDF;
 
 /**
  * To support vectorized cast of decimal to string.
@@ -43,7 +44,7 @@ public class CastDecimalToString extends
 
   @Override
   protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) {
-    String s = inV.vector[i].getHiveDecimalString();
+    String s = inV.vector[i].getHiveDecimal().toString();
     byte[] b = null;
     try {
       b = s.getBytes("UTF-8");

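The removed getHiveDecimalString() helper belonged to the Decimal128-backed column; with HiveDecimalWritable elements the same text is obtained by materializing the HiveDecimal first. A minimal sketch of the full pattern in the method above (BytesColumnVector.setVal is the assumed way the bytes land in the output column):

    // decimal -> decimal string -> UTF-8 bytes for the output column
    String s = inV.vector[i].getHiveDecimal().toString();
    byte[] b;
    try {
      b = s.getBytes("UTF-8");            // decimal digits are plain ASCII, so this is safe
    } catch (java.io.UnsupportedEncodingException e) {
      throw new RuntimeException(e);      // UTF-8 is guaranteed by the JDK
    }
    outV.setVal(i, b, 0, b.length);       // assumed helper: copies the slice into the vector
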
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java Sat Nov  1 20:55:00 2014
@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
-import org.apache.hadoop.hive.common.type.SqlMathUtil;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -31,34 +30,23 @@ import org.apache.hadoop.hive.ql.exec.ve
 public class CastDecimalToTimestamp extends FuncDecimalToLong {
   private static final long serialVersionUID = 1L;
 
-  /* The field tmp is a scratch variable for this operation. It is
-   * purposely not made static because if this code is ever made multi-threaded,
-   * each thread will then have its own VectorExpression tree and thus
-   * its own copy of the variable.
-   */
-  private transient Decimal128 tmp = null;
-  private static transient Decimal128 tenE9 = new Decimal128(1000000000);
+  private static transient HiveDecimal tenE9 = HiveDecimal.create(1000000000);
 
   public CastDecimalToTimestamp(int inputColumn, int outputColumn) {
     super(inputColumn, outputColumn);
-    tmp = new Decimal128(0);
   }
 
   public CastDecimalToTimestamp() {
-
-    // initialize local field after deserialization
-    tmp = new Decimal128(0);
   }
 
   @Override
   protected void func(LongColumnVector outV, DecimalColumnVector inV,  int i) {
-    tmp.update(inV.vector[i]);
-
-    // Reduce scale at most by 9, therefore multiplication will not require rounding.
-    int newScale = inV.scale > 9 ? (inV.scale - 9) : 0;
-    tmp.multiplyDestructive(tenE9, (short) newScale);
-
-    // set output
-    outV.vector[i] = tmp.longValue();
+    HiveDecimal result = inV.vector[i].getHiveDecimal().multiply(tenE9);
+    if (result == null) {
+      outV.noNulls = false;
+      outV.isNull[i] = true;
+    } else {
+      outV.vector[i] = result.longValue();
+    }
   }
 }

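tenE9 is 10^9: the decimal holds seconds while the long timestamp column stores nanoseconds. Note the change in error handling: with immutable HiveDecimal there is no scratch state to corrupt, and multiply() signals precision overflow by returning null, which func() translates into a row-level SQL NULL. A worked example under the same API, values assumed for illustration:

    // CAST(1.5 AS TIMESTAMP): 1.5 seconds -> 1500000000 nanoseconds
    HiveDecimal seconds = HiveDecimal.create("1.5");
    HiveDecimal nanos = seconds.multiply(HiveDecimal.create(1000000000));
    if (nanos == null) {
      // precision overflow: the row would be marked NULL, as in func() above
    } else {
      long v = nanos.longValue();         // 1500000000L
    }
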
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 
@@ -38,7 +39,7 @@ public class CastDoubleToDecimal extends
 
   @Override
   protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
-    outV.vector[i].update(inV.vector[i], outV.scale);
-    outV.checkPrecisionOverflow(i);
+    String s = ((Double) inV.vector[i]).toString();
+    outV.vector[i].set(HiveDecimal.create(s));
   }
 }

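The String round-trip is deliberate: Double.toString produces the shortest decimal literal that round-trips to the same double, while constructing a BigDecimal from the raw bits exposes the exact binary expansion. A plain-JDK illustration (not part of the patch):

    System.out.println(Double.toString(0.1));
    // 0.1
    System.out.println(new java.math.BigDecimal(0.1));
    // 0.1000000000000000055511151231257827021181583404541015625
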
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -40,7 +41,6 @@ public class CastLongToDecimal extends F
 
   @Override
   protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
-    outV.vector[i].update(inV.vector[i], outV.scale);
-    outV.checkPrecisionOverflow(i);
+    outV.vector[i].set(HiveDecimal.create(inV.vector[i]));
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
@@ -58,14 +59,13 @@ public class CastStringToDecimal extends
        * making a new string.
        */
       s = new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8");
-      outV.vector[i].update(s, outV.scale);
+      outV.vector[i].set(HiveDecimal.create(s));
     } catch (Exception e) {
 
       // for any exception in conversion to decimal, produce NULL
       outV.noNulls = false;
       outV.isNull[i] = true;
     }
-    outV.checkPrecisionOverflow(i);
   }
 
   @Override

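As elsewhere in this patch, a malformed input row is demoted to SQL NULL instead of failing the whole batch. Both flag writes in the catch block are required, since readers consult noNulls before ever looking at isNull[]. The idiom, annotated:

    try {
      outV.vector[i].set(HiveDecimal.create(s));   // s parsed from the UTF-8 slice above
    } catch (Exception e) {
      outV.noNulls = false;    // tells readers the isNull[] mask is meaningful
      outV.isNull[i] = true;   // only this row becomes NULL; the batch keeps flowing
    }
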
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -39,9 +40,10 @@ public class CastTimestampToDecimal exte
   @Override
   protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
 
-    // the resulting decimal value is 10e-9 * the input long value.
-    outV.vector[i].updateFixedPoint(inV.vector[i], (short) 9);
-    outV.vector[i].changeScaleDestructive(outV.scale);
-    outV.checkPrecisionOverflow(i);
+    // The resulting decimal value is 10^-9 * the input long value (nanoseconds scaled to seconds).
+    //
+    HiveDecimal result = HiveDecimal.create(inV.vector[i]);
+    result = result.scaleByPowerOfTen(-9);
+    outV.set(i, result);
   }
 }

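This is the inverse of CastDecimalToTimestamp above: rather than multiplying by 10^9, the long nanosecond count is shifted down nine decimal places with scaleByPowerOfTen. A worked example with an assumed input value:

    // 1500000000 nanoseconds -> decimal 1.5 seconds
    HiveDecimal nanos = HiveDecimal.create(1500000000L);
    HiveDecimal seconds = nanos.scaleByPowerOfTen(-9);   // 1.5
    // outV.set(i, seconds) then stores it subject to the column's precision and scale
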
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java Sat Nov  1 20:55:00 2014
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.*;
@@ -44,7 +44,7 @@ public class ConstantVectorExpression ex
   protected long longValue = 0;
   private double doubleValue = 0;
   private byte[] bytesValue = null;
-  private Decimal128 decimalValue = null;
+  private HiveDecimal decimalValue = null;
   private boolean isNullValue = false;
 
   private Type type;
@@ -85,7 +85,7 @@ public class ConstantVectorExpression ex
     setBytesValue(value.getValue().getBytes());
   }
 
-  public ConstantVectorExpression(int outputColumn, Decimal128 value) {
+  public ConstantVectorExpression(int outputColumn, HiveDecimal value) {
     this(outputColumn, "decimal");
     setDecimalValue(value);
   }
@@ -137,7 +137,7 @@ public class ConstantVectorExpression ex
     dcv.isRepeating = true;
     dcv.noNulls = !isNullValue;
     if (!isNullValue) {
-      dcv.vector[0].update(decimalValue);
+      dcv.vector[0].set(decimalValue);
     } else {
       dcv.isNull[0] = true;
     }
@@ -191,7 +191,7 @@ public class ConstantVectorExpression ex
     this.bytesValueLength = bytesValue.length;
   }
 
-  public void setDecimalValue(Decimal128 decimalValue) {
+  public void setDecimalValue(HiveDecimal decimalValue) {
     this.decimalValue = decimalValue;
   }
 

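The evaluate path above uses the standard constant encoding for vectorized batches: isRepeating means logical row 0 stands for every row, so a constant costs one HiveDecimalWritable per batch rather than one per row. A consumer-side sketch of how such a column is read (names assumed):

    if (dcv.isRepeating) {
      // one value (or one NULL) represents all batch.size rows
      if (dcv.noNulls || !dcv.isNull[0]) {
        HiveDecimal constant = dcv.vector[0].getHiveDecimal();
        // ... apply constant to every selected row ...
      } else {
        // the whole column is NULL for this batch
      }
    }
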
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java Sat Nov  1 20:55:00 2014
@@ -18,11 +18,12 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.HashSet;
 
@@ -32,11 +33,11 @@ import java.util.HashSet;
 public class DecimalColumnInList extends VectorExpression implements IDecimalInExpr {
   private static final long serialVersionUID = 1L;
   private int inputCol;
-  private Decimal128[] inListValues;
+  private HiveDecimal[] inListValues;
   private int outputColumn;
 
   // The set object containing the IN list.
-  private transient HashSet<Decimal128> inSet;
+  private transient HashSet<HiveDecimal> inSet;
 
   public DecimalColumnInList() {
     super();
@@ -60,8 +61,8 @@ public class DecimalColumnInList extends
     }
 
     if (inSet == null) {
-      inSet = new HashSet<Decimal128>(inListValues.length);
-      for (Decimal128 val : inListValues) {
+      inSet = new HashSet<HiveDecimal>(inListValues.length);
+      for (HiveDecimal val : inListValues) {
         inSet.add(val);
       }
     }
@@ -72,7 +73,7 @@ public class DecimalColumnInList extends
     boolean[] nullPos = inputColVector.isNull;
     boolean[] outNulls = outputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
     long[] outputVector = outputColVector.vector;
 
     // return immediately if batch is empty
@@ -87,16 +88,16 @@ public class DecimalColumnInList extends
 
         // All must be selected otherwise size would be zero
         // Repeating property will not change.
-        outputVector[0] = inSet.contains(vector[0]) ? 1 : 0;
+        outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0;
         outputColVector.isRepeating = true;
       } else if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+          outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+          outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
         }
       }
     } else {
@@ -105,7 +106,7 @@ public class DecimalColumnInList extends
         //All must be selected otherwise size would be zero
         //Repeating property will not change.
         if (!nullPos[0]) {
-          outputVector[0] = inSet.contains(vector[0]) ? 1 : 0;
+          outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0;
           outNulls[0] = false;
         } else {
           outNulls[0] = true;
@@ -116,14 +117,14 @@ public class DecimalColumnInList extends
           int i = sel[j];
           outNulls[i] = nullPos[i];
           if (!nullPos[i]) {
-            outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+            outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
           }
         }
       } else {
         System.arraycopy(nullPos, 0, outNulls, 0, n);
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+            outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
           }
         }
       }
@@ -148,11 +149,7 @@ public class DecimalColumnInList extends
     return null;
   }
 
-  public Decimal128[] getInListValues() {
-    return this.inListValues;
-  }
-
-  public void setInListValues(Decimal128[] a) {
+  public void setInListValues(HiveDecimal[] a) {
     this.inListValues = a;
   }
 }

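The IN set now keys on HiveDecimal, whose equals/hashCode are value-based (values are normalized on creation, so trailing zeros should not split equal values across hash buckets; worth verifying against the HiveDecimal in use). Each probe materializes a HiveDecimal from the column's writable, as in the loops above. A standalone sketch:

    java.util.HashSet<HiveDecimal> inSet = new java.util.HashSet<HiveDecimal>();
    inSet.add(HiveDecimal.create("1.5"));
    inSet.add(HiveDecimal.create("2.75"));

    HiveDecimalWritable w = new HiveDecimalWritable();
    w.set(HiveDecimal.create("1.50"));                   // same value, different textual scale
    boolean hit = inSet.contains(w.getHiveDecimal());    // true, assuming create() normalizes
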
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java Sat Nov  1 20:55:00 2014
@@ -18,32 +18,60 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.SqlMathUtil;
-import org.apache.hadoop.hive.common.type.UnsignedInt128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.udf.generic.RoundUtils;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * Utility functions for vector operations on decimal values.
  */
 public class DecimalUtil {
 
-  public static final Decimal128 DECIMAL_ONE = new Decimal128();
-  private static final UnsignedInt128 scratchUInt128 = new UnsignedInt128();
+  public static int compare(HiveDecimalWritable writableLeft, HiveDecimal right) {
+    return writableLeft.getHiveDecimal().compareTo(right);
+  }
 
-  static {
-    DECIMAL_ONE.update(1L, (short) 0);
+  public static int compare(HiveDecimal left, HiveDecimalWritable writableRight) {
+    return left.compareTo(writableRight.getHiveDecimal());
   }
 
   // Addition with overflow check. Overflow produces NULL output.
-  public static void addChecked(int i, Decimal128 left, Decimal128 right,
+  public static void addChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.add(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().add(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().add(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.add(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.add(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -51,11 +79,40 @@ public class DecimalUtil {
   }
 
   // Subtraction with overflow check. Overflow produces NULL output.
-  public static void subtractChecked(int i, Decimal128 left, Decimal128 right,
+  public static void subtractChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.subtract(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().subtract(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().subtract(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.subtract(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.subtract(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -63,11 +120,40 @@ public class DecimalUtil {
   }
 
   // Multiplication with overflow check. Overflow produces NULL output.
-  public static void multiplyChecked(int i, Decimal128 left, Decimal128 right,
+  public static void multiplyChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.multiply(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().multiply(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().multiply(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.multiply(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.multiply(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -75,11 +161,40 @@ public class DecimalUtil {
   }
 
   // Division with overflow/zero-divide check. Error produces NULL output.
-  public static void divideChecked(int i, Decimal128 left, Decimal128 right,
+  public static void divideChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.divide(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().divide(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().divide(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.divide(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.divide(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on error
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -87,80 +202,138 @@ public class DecimalUtil {
   }
 
   // Modulo operator with overflow/zero-divide check.
-  public static void moduloChecked(int i, Decimal128 left, Decimal128 right,
+  public static void moduloChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.remainder(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().remainder(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().remainder(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.modulo(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.remainder(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on error
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void floor(int i, Decimal128 input, DecimalColumnVector outputColVector) {
+  public static void floor(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_FLOOR));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void floor(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_FLOOR));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void ceiling(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_CEILING));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void ceiling(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_CEILING));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void round(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
     try {
-      Decimal128 result = outputColVector.vector[i];
-      result.update(input);
-      result.zeroFractionPart(scratchUInt128);
-      result.changeScaleDestructive(outputColVector.scale);
-      if ((result.compareTo(input) != 0) && input.getSignum() < 0) {
-        result.subtractDestructive(DECIMAL_ONE, outputColVector.scale);
-      }
+      outputColVector.set(i, RoundUtils.round(input, outputColVector.scale));
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void ceiling(int i, Decimal128 input, DecimalColumnVector outputColVector) {
+  public static void round(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
     try {
-      Decimal128 result = outputColVector.vector[i];
-      result.update(input);
-      result.zeroFractionPart(scratchUInt128);
-      result.changeScaleDestructive(outputColVector.scale);
-      if ((result.compareTo(input) != 0) && input.getSignum() > 0) {
-        result.addDestructive(DECIMAL_ONE, outputColVector.scale);
-      }
+      outputColVector.set(i, RoundUtils.round(input.getHiveDecimal(), outputColVector.scale));
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void round(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    HiveDecimal inputHD = HiveDecimal.create(input.toBigDecimal());
-    HiveDecimal result = RoundUtils.round(inputHD, outputColVector.scale);
-    if (result == null) {
+  public static void sign(int i, HiveDecimal input, LongColumnVector outputColVector) {
+    outputColVector.vector[i] = input.signum();
+  }
+
+  public static void sign(int i, HiveDecimalWritable input, LongColumnVector outputColVector) {
+    outputColVector.vector[i] = input.getHiveDecimal().signum();
+  }
+
+  public static void abs(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.abs());
+    } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
-    } else {
-      outputColVector.vector[i].update(result.bigDecimalValue().toPlainString(), outputColVector.scale);
     }
   }
 
-  public static void sign(int i, Decimal128 input, LongColumnVector outputColVector) {
-    outputColVector.vector[i] = input.getSignum();
+  public static void abs(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.getHiveDecimal().abs());
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
   }
 
-  public static void abs(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    Decimal128 result = outputColVector.vector[i];
+  public static void negate(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
     try {
-      result.update(input);
-      result.absDestructive();
-      result.changeScaleDestructive(outputColVector.scale);
+      outputColVector.set(i, input.negate());
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void negate(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    Decimal128 result = outputColVector.vector[i];
+  public static void negate(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
     try {
-      result.update(input);
-      result.negateDestructive();
-      result.changeScaleDestructive(outputColVector.scale);
+      outputColVector.set(i, input.getHiveDecimal().negate());
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;

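Each operation now comes in four overloads, the cross product of HiveDecimal and HiveDecimalWritable operands, so call sites can pass whichever form a column element or a constant already has without converting first; all of them funnel failures into the same NULL-on-ArithmeticException idiom. Divide-by-zero, for example, yields a row NULL rather than a query error. A hedged usage sketch (constructor arguments assumed to be size, precision, scale):

    DecimalColumnVector out = new DecimalColumnVector(1024, 38, 18);
    DecimalUtil.divideChecked(0, HiveDecimal.create("1"), HiveDecimal.create("0"), out);
    // out.isNull[0] == true and out.noNulls == false; the batch keeps flowing
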
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java Sat Nov  1 20:55:00 2014
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.HashSet;
 
@@ -31,10 +32,10 @@ import java.util.HashSet;
 public class FilterDecimalColumnInList extends VectorExpression implements IDecimalInExpr {
   private static final long serialVersionUID = 1L;
   private int inputCol;
-  private Decimal128[] inListValues;
+  private HiveDecimal[] inListValues;
 
   // The set object containing the IN list.
-  private transient HashSet<Decimal128> inSet;
+  private transient HashSet<HiveDecimal> inSet;
 
   public FilterDecimalColumnInList() {
     super();
@@ -57,8 +58,8 @@ public class FilterDecimalColumnInList e
     }
 
     if (inSet == null) {
-      inSet = new HashSet<Decimal128>(inListValues.length);
-      for (Decimal128 val : inListValues) {
+      inSet = new HashSet<HiveDecimal>(inListValues.length);
+      for (HiveDecimal val : inListValues) {
         inSet.add(val);
       }
     }
@@ -67,7 +68,7 @@ public class FilterDecimalColumnInList e
     int[] sel = batch.selected;
     boolean[] nullPos = inputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -80,7 +81,7 @@ public class FilterDecimalColumnInList e
         // All must be selected otherwise size would be zero
         // Repeating property will not change.
 
-        if (!(inSet.contains(vector[0]))) {
+        if (!(inSet.contains(vector[0].getHiveDecimal()))) {
           //Entire batch is filtered out.
           batch.size = 0;
         }
@@ -88,7 +89,7 @@ public class FilterDecimalColumnInList e
         int newSize = 0;
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          if (inSet.contains(vector[i])) {
+          if (inSet.contains(vector[i].getHiveDecimal())) {
             sel[newSize++] = i;
           }
         }
@@ -96,7 +97,7 @@ public class FilterDecimalColumnInList e
       } else {
         int newSize = 0;
         for(int i = 0; i != n; i++) {
-          if (inSet.contains(vector[i])) {
+          if (inSet.contains(vector[i].getHiveDecimal())) {
             sel[newSize++] = i;
           }
         }
@@ -111,7 +112,7 @@ public class FilterDecimalColumnInList e
         //All must be selected otherwise size would be zero
         //Repeating property will not change.
         if (!nullPos[0]) {
-          if (!inSet.contains(vector[0])) {
+          if (!inSet.contains(vector[0].getHiveDecimal())) {
 
             //Entire batch is filtered out.
             batch.size = 0;
@@ -124,7 +125,7 @@ public class FilterDecimalColumnInList e
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           if (!nullPos[i]) {
-           if (inSet.contains(vector[i])) {
+           if (inSet.contains(vector[i].getHiveDecimal())) {
              sel[newSize++] = i;
            }
           }
@@ -136,7 +137,7 @@ public class FilterDecimalColumnInList e
         int newSize = 0;
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            if (inSet.contains(vector[i])) {
+            if (inSet.contains(vector[i].getHiveDecimal())) {
               sel[newSize++] = i;
             }
           }
@@ -167,11 +168,7 @@ public class FilterDecimalColumnInList e
     return null;
   }
 
-  public Decimal128[] getInListValues() {
-    return this.inListValues;
-  }
-
-  public void setInListValues(Decimal128[] a) {
+  public void setInListValues(HiveDecimal[] a) {
     this.inListValues = a;
   }
 }

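Unlike DecimalColumnInList, which writes 0/1 into an output column, the filter variant consumes rows: it compacts the surviving row indices to the front of batch.selected and shrinks batch.size, copying no data. The idiom from the loops above, annotated:

    int newSize = 0;
    for (int j = 0; j != n; j++) {
      int i = sel[j];                                    // candidate row index
      if (inSet.contains(vector[i].getHiveDecimal())) {
        sel[newSize++] = i;                              // survivor: compact to the front
      }
    }
    batch.size = newSize;                                // rows beyond newSize are dropped
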
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java Sat Nov  1 20:55:00 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
@@ -117,18 +116,6 @@ public abstract class FuncDecimalToLong 
     return outputColumn;
   }
 
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  public int getInputColumn() {
-    return inputColumn;
-  }
-
-  public void setInputColumn(int inputColumn) {
-    this.inputColumn = inputColumn;
-  }
-
   @Override
   public String getOutputType() {
     return "long";

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java Sat Nov  1 20:55:00 2014
@@ -21,9 +21,9 @@ package org.apache.hadoop.hive.ql.exec.v
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.Arrays;
 
@@ -61,7 +61,7 @@ public class FuncRoundWithNumDigitsDecim
     boolean[] outputIsNull = outputColVector.isNull;
     outputColVector.noNulls = inputColVector.noNulls;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -119,27 +119,6 @@ public class FuncRoundWithNumDigitsDecim
   public String getOutputType() {
     return outputType;
   }
-  
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  public int getDecimalPlaces() {
-    return decimalPlaces;
-  }
-
-  public void setDecimalPlaces(int decimalPlaces) {
-    this.decimalPlaces = decimalPlaces;
-  }
-
 
   @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java Sat Nov  1 20:55:00 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 
 public interface IDecimalInExpr {
-  void setInListValues(Decimal128[] inVals);
+  void setInListValues(HiveDecimal[] inVals);
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java Sat Nov  1 20:55:00 2014
@@ -300,18 +300,18 @@ public class NullUtil {
     if (v.noNulls) {
       return;
     } else if (v.isRepeating && v.isNull[0]) {
-      v.vector[0].setNullDataValue();
+      v.setNullDataValue(0);
     } else if (selectedInUse) {
       for (int j = 0; j != n; j++) {
         int i = sel[j];
         if(v.isNull[i]) {
-          v.vector[i].setNullDataValue();
+          v.setNullDataValue(i);
         }
       }
     } else {
       for (int i = 0; i != n; i++) {
         if(v.isNull[i]) {
-          v.vector[i].setNullDataValue();
+          v.setNullDataValue(i);
         }
       }
     }

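The dummy value matters because vectorized inner loops often compute over every row unconditionally and mask the results with isNull[] afterwards; a NULL slot left empty would NPE inside that loop. With HiveDecimalWritable elements the placeholder now lives behind a column-level method rather than an element-level one:

    // after this pass, arithmetic can run blindly over all n rows
    for (int i = 0; i != n; i++) {
      if (v.isNull[i]) {
        v.setNullDataValue(i);   // parks a harmless well-formed value in the slot
      }
    }
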
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java Sat Nov  1 20:55:00 2014
@@ -18,9 +18,10 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
 
@@ -34,7 +35,8 @@ public interface VectorExpressionWriter 
   Object writeValue(long value) throws HiveException;
   Object writeValue(double value) throws HiveException;
   Object writeValue(byte[] value, int start, int length) throws HiveException;
-  Object writeValue(Decimal128 value) throws HiveException;
+  Object writeValue(HiveDecimalWritable value) throws HiveException;
+  Object writeValue(HiveDecimal value) throws HiveException;
   Object setValue(Object row, ColumnVector column, int columnRow) throws HiveException;
   Object initValue(Object ost) throws HiveException;
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java Sat Nov  1 20:55:00 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -27,7 +26,6 @@ import java.util.List;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -134,14 +132,29 @@ public final class VectorExpressionWrite
      * The base implementation must be overridden by the Decimal specialization
      */
     @Override
-    public Object writeValue(Decimal128 value) throws HiveException {
+    public Object writeValue(HiveDecimal value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
     /**
      * The base implementation must be overridden by the Decimal specialization
      */
-    public Object setValue(Object field, Decimal128 value) throws HiveException {
+    @Override
+    public Object writeValue(HiveDecimalWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Decimal specialization
+     */
+    public Object setValue(Object field, HiveDecimalWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Decimal specialization
+     */
+    public Object setValue(Object field, HiveDecimal value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
   }
@@ -465,24 +478,35 @@ public final class VectorExpressionWrite
       }
 
       @Override
-      public Object writeValue(Decimal128 value) throws HiveException {
-        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj,
-            HiveDecimal.create(value.toBigDecimal()));
+      public Object writeValue(HiveDecimalWritable value) throws HiveException {
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object writeValue(HiveDecimal value) throws HiveException {
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object setValue(Object field, HiveDecimalWritable value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value);
       }
 
       @Override
-      public Object setValue(Object field, Decimal128 value) {
+      public Object setValue(Object field, HiveDecimal value) {
         if (null == field) {
           field = initValue(null);
         }
-        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field,
-            HiveDecimal.create(value.toBigDecimal()));
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value);
       }
 
       @Override
       public Object initValue(Object ignored) {
         return ((SettableHiveDecimalObjectInspector) this.objectInspector).create(
-            HiveDecimal.create(BigDecimal.ZERO));
+            HiveDecimal.ZERO);
       }
     }.init(fieldObjInspector);
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java Sat Nov  1 20:55:00 2014
@@ -21,9 +21,9 @@ package org.apache.hadoop.hive.ql.exec.v
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
@@ -41,7 +41,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hive.common.util.Decimal128FastBuffer;
 
 /**
  * Generated from template VectorUDAFAvg.txt.
@@ -57,24 +56,45 @@ public class VectorUDAFAvgDecimal extend
 
       private static final long serialVersionUID = 1L;
 
-      transient private final Decimal128 sum = new Decimal128();
+      transient private final HiveDecimalWritable sum = new HiveDecimalWritable();
       transient private long count;
       transient private boolean isNull;
 
-      public void sumValueWithCheck(Decimal128 value, short scale) {
+      // We use this to catch overflow.
+      transient private boolean isOutOfRange;
+
+      public void sumValueWithNullCheck(HiveDecimalWritable writable, short scale) {
+        if (isOutOfRange) {
+          return;
+        }
+        HiveDecimal value = writable.getHiveDecimal();
         if (isNull) {
-          sum.update(value);
-          sum.changeScaleDestructive(scale);
+          sum.set(value);
           count = 1;
           isNull = false;
         } else {
-          sum.addDestructive(value, scale);
+          HiveDecimal result;
+          try {
+            result = sum.getHiveDecimal().add(value);
+          } catch (ArithmeticException e) {  // catch on overflow
+            isOutOfRange = true;
+            return;
+          }
+          sum.set(result);
           count++;
         }
       }
 
-      public void sumValueNoCheck(Decimal128 value, short scale) {
-        sum.addDestructive(value, scale);
+      public void sumValueNoNullCheck(HiveDecimalWritable writable, short scale) {
+        HiveDecimal value = writable.getHiveDecimal();
+        HiveDecimal result;
+        try {
+          result = sum.getHiveDecimal().add(value);
+        } catch (ArithmeticException e) {  // catch on overflow
+          isOutOfRange = true;
+          return;
+        }
+        sum.set(result);
         count++;
       }
 
@@ -87,7 +107,8 @@ public class VectorUDAFAvgDecimal extend
       @Override
       public void reset() {
         isNull = true;
-        sum.zeroClear();
+        isOutOfRange = false;
+        sum.set(HiveDecimal.ZERO);
         count = 0L;
       }
     }
@@ -98,8 +119,6 @@ public class VectorUDAFAvgDecimal extend
     transient private HiveDecimalWritable resultSum;
     transient private StructObjectInspector soi;
 
-    transient private final Decimal128FastBuffer scratch;
-
     /**
      * The scale of the SUM in the partial output
      */
@@ -120,12 +139,6 @@ public class VectorUDAFAvgDecimal extend
      */
     private short inputPrecision;
 
-    /**
-     * A value used as scratch to avoid allocating at runtime.
-     * Needed by computations like vector[0] * batchSize
-     */
-    transient private Decimal128 scratchDecimal = new Decimal128();
-
     public VectorUDAFAvgDecimal(VectorExpression inputExpression) {
       this();
       this.inputExpression = inputExpression;
@@ -138,7 +151,6 @@ public class VectorUDAFAvgDecimal extend
       resultSum = new HiveDecimalWritable();
       partialResult[0] = resultCount;
       partialResult[1] = resultSum;
-      scratch = new Decimal128FastBuffer();
 
     }
 
@@ -185,7 +197,7 @@ public class VectorUDAFAvgDecimal extend
 
        DecimalColumnVector inputVector = ( DecimalColumnVector)batch.
         cols[this.inputExpression.getOutputColumn()];
-      Decimal128[] vector = inputVector.vector;
+      HiveDecimalWritable[] vector = inputVector.vector;
 
       if (inputVector.noNulls) {
         if (inputVector.isRepeating) {
@@ -231,7 +243,7 @@ public class VectorUDAFAvgDecimal extend
     private void iterateNoNullsRepeatingWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       int batchSize) {
 
       for (int i=0; i < batchSize; ++i) {
@@ -239,14 +251,14 @@ public class VectorUDAFAvgDecimal extend
           aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValueWithCheck(value, this.sumScale);
+        myagg.sumValueWithNullCheck(value, this.sumScale);
       }
     }
 
     private void iterateNoNullsSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       int[] selection,
       int batchSize) {
 
@@ -255,28 +267,28 @@ public class VectorUDAFAvgDecimal extend
           aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValueWithCheck(values[selection[i]], this.sumScale);
+        myagg.sumValueWithNullCheck(values[selection[i]], this.sumScale);
       }
     }
 
     private void iterateNoNullsWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       int batchSize) {
       for (int i=0; i < batchSize; ++i) {
         Aggregation myagg = getCurrentAggregationBuffer(
           aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValueWithCheck(values[i], this.sumScale);
+        myagg.sumValueWithNullCheck(values[i], this.sumScale);
       }
     }
 
     private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       int batchSize,
       int[] selection,
       boolean[] isNull) {
@@ -287,7 +299,7 @@ public class VectorUDAFAvgDecimal extend
             aggregationBufferSets,
             bufferIndex,
             i);
-          myagg.sumValueWithCheck(value, this.sumScale);
+          myagg.sumValueWithNullCheck(value, this.sumScale);
         }
       }
 
@@ -296,7 +308,7 @@ public class VectorUDAFAvgDecimal extend
     private void iterateHasNullsRepeatingWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       int batchSize,
       boolean[] isNull) {
 
@@ -306,7 +318,7 @@ public class VectorUDAFAvgDecimal extend
             aggregationBufferSets,
             bufferIndex,
             i);
-          myagg.sumValueWithCheck(value, this.sumScale);
+          myagg.sumValueWithNullCheck(value, this.sumScale);
         }
       }
     }
@@ -314,7 +326,7 @@ public class VectorUDAFAvgDecimal extend
     private void iterateHasNullsSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       int batchSize,
       int[] selection,
       boolean[] isNull) {
@@ -326,7 +338,7 @@ public class VectorUDAFAvgDecimal extend
             aggregationBufferSets,
             bufferIndex,
             j);
-          myagg.sumValueWithCheck(values[i], this.sumScale);
+          myagg.sumValueWithNullCheck(values[i], this.sumScale);
         }
       }
    }
@@ -334,7 +346,7 @@ public class VectorUDAFAvgDecimal extend
     private void iterateHasNullsWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int bufferIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       int batchSize,
       boolean[] isNull) {
 
@@ -344,7 +356,7 @@ public class VectorUDAFAvgDecimal extend
             aggregationBufferSets,
             bufferIndex,
             i);
-          myagg.sumValueWithCheck(values[i], this.sumScale);
+          myagg.sumValueWithNullCheck(values[i], this.sumScale);
         }
       }
    }
@@ -367,18 +379,31 @@ public class VectorUDAFAvgDecimal extend
 
         Aggregation myagg = (Aggregation)agg;
 
-        Decimal128[] vector = inputVector.vector;
+        HiveDecimalWritable[] vector = inputVector.vector;
 
         if (inputVector.isRepeating) {
           if (inputVector.noNulls) {
             if (myagg.isNull) {
               myagg.isNull = false;
-              myagg.sum.zeroClear();
+              myagg.sum.set(HiveDecimal.ZERO);
               myagg.count = 0;
             }
-            scratchDecimal.update(batchSize);
-            scratchDecimal.multiplyDestructive(vector[0], vector[0].getScale());
-            myagg.sum.update(scratchDecimal);
+            HiveDecimal value = vector[0].getHiveDecimal();
+            HiveDecimal multiple;
+            try {
+              multiple = value.multiply(HiveDecimal.create(batchSize));
+            } catch (ArithmeticException e) {  // catch on overflow
+              myagg.isOutOfRange = true;
+              return;
+            }
+            HiveDecimal result;
+            try {
+              result = myagg.sum.getHiveDecimal().add(multiple);
+            } catch (ArithmeticException e) {  // catch on overflow
+              myagg.isOutOfRange = true;
+              return;
+            }
+            myagg.sum.set(result);
             myagg.count += batchSize;
           }
           return;
@@ -400,7 +425,7 @@ public class VectorUDAFAvgDecimal extend
 
     private void iterateSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         int batchSize,
         boolean[] isNull,
         int[] selected) {
@@ -408,57 +433,57 @@ public class VectorUDAFAvgDecimal extend
       for (int j=0; j< batchSize; ++j) {
         int i = selected[j];
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
-          myagg.sumValueWithCheck(value, this.sumScale);
+          HiveDecimalWritable value = vector[i];
+          myagg.sumValueWithNullCheck(value, this.sumScale);
         }
       }
     }
 
     private void iterateSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         int batchSize,
         int[] selected) {
 
       if (myagg.isNull) {
         myagg.isNull = false;
-        myagg.sum.zeroClear();
+        myagg.sum.set(HiveDecimal.ZERO);
         myagg.count = 0;
       }
 
       for (int i=0; i< batchSize; ++i) {
-        Decimal128 value = vector[selected[i]];
-        myagg.sumValueNoCheck(value, this.sumScale);
+        HiveDecimalWritable value = vector[selected[i]];
+        myagg.sumValueNoNullCheck(value, this.sumScale);
       }
     }
 
     private void iterateNoSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         int batchSize,
         boolean[] isNull) {
 
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
-          myagg.sumValueWithCheck(value, this.sumScale);
+          HiveDecimalWritable value = vector[i];
+          myagg.sumValueWithNullCheck(value, this.sumScale);
         }
       }
     }
 
     private void iterateNoSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         int batchSize) {
       if (myagg.isNull) {
         myagg.isNull = false;
-        myagg.sum.zeroClear();
+        myagg.sum.set(HiveDecimal.ZERO);
         myagg.count = 0;
       }
 
       for (int i=0;i<batchSize;++i) {
-        Decimal128 value = vector[i];
-        myagg.sumValueNoCheck(value, this.sumScale);
+        HiveDecimalWritable value = vector[i];
+        myagg.sumValueNoNullCheck(value, this.sumScale);
       }
     }
 
@@ -477,13 +502,13 @@ public class VectorUDAFAvgDecimal extend
     public Object evaluateOutput(
         AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
-      if (myagg.isNull) {
+      if (myagg.isNull || myagg.isOutOfRange) {
         return null;
       }
       else {
         assert(0 < myagg.count);
         resultCount.set (myagg.count);
-        resultSum.set(HiveDecimal.create(myagg.sum.toBigDecimal()));
+        resultSum.set(myagg.sum.getHiveDecimal());
         return partialResult;
       }
     }

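Every hunk in VectorUDAFAvgDecimal above follows one pattern: the mutable Decimal128 accumulator becomes an immutable HiveDecimal held in a HiveDecimalWritable, and an ArithmeticException from HiveDecimal arithmetic latches the new isOutOfRange flag so that evaluateOutput() returns NULL. A minimal standalone sketch of that accumulator shape, using only the HiveDecimal/HiveDecimalWritable calls that appear in the patch (the DecimalAvgSketch class itself is illustrative and not part of the patch; the real aggregate emits a {count, sum} partial result rather than dividing):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

class DecimalAvgSketch {
  private final HiveDecimalWritable sum = new HiveDecimalWritable();
  private long count;
  private boolean isNull = true;
  private boolean isOutOfRange;        // latched on the first overflow

  void add(HiveDecimalWritable writable) {
    if (isOutOfRange) {
      return;                          // an overflowed aggregate stays overflowed
    }
    if (isNull) {
      isNull = false;
      sum.set(HiveDecimal.ZERO);
      count = 0;
    }
    HiveDecimal result;
    try {
      result = sum.getHiveDecimal().add(writable.getHiveDecimal());
    } catch (ArithmeticException e) {  // overflow past the decimal range
      isOutOfRange = true;
      return;
    }
    sum.set(result);
    count++;
  }

  HiveDecimal average() {
    if (isNull || isOutOfRange || count == 0) {
      return null;                     // NULL both for "no rows" and for overflow
    }
    return sum.getHiveDecimal().divide(HiveDecimal.create(count));
  }
}

The same try/latch/return shape repeats in every iterate* method, so a single overflow anywhere in the stream permanently NULLs that aggregate.
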
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java Sat Nov  1 20:55:00 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
@@ -29,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
@@ -48,15 +48,29 @@ public class VectorUDAFSumDecimal extend
 
       private static final long serialVersionUID = 1L;
 
-      transient private Decimal128 sum = new Decimal128();
+      transient private HiveDecimalWritable sum = new HiveDecimalWritable();
       transient private boolean isNull;
 
-      public void sumValue(Decimal128 value, short scale) {
+      // We use this to catch overflow.
+      transient private boolean isOutOfRange;
+
+      public void sumValue(HiveDecimalWritable writable, short scale) {
+        if (isOutOfRange) {
+          return;
+        }
+        HiveDecimal value = writable.getHiveDecimal();
         if (isNull) {
-          sum.update(value, scale);
+          sum.set(value);
           isNull = false;
         } else {
-          sum.addDestructive(value, scale);
+          HiveDecimal result;
+          try {
+            result = sum.getHiveDecimal().add(value);
+          } catch (ArithmeticException e) {  // catch on overflow
+            isOutOfRange = true;
+            return;
+          }
+          sum.set(result);
         }
       }
 
@@ -68,12 +82,13 @@ public class VectorUDAFSumDecimal extend
       @Override
       public void reset() {
         isNull = true;
-        sum.zeroClear();
+        isOutOfRange = false;
+        sum.set(HiveDecimal.ZERO);
       }
     }
 
     private VectorExpression inputExpression;
-    transient private final Decimal128 scratchDecimal;
+    transient private final HiveDecimalWritable scratchDecimal;
 
     public VectorUDAFSumDecimal(VectorExpression inputExpression) {
       this();
@@ -82,7 +97,7 @@ public class VectorUDAFSumDecimal extend
 
     public VectorUDAFSumDecimal() {
       super();
-      scratchDecimal = new Decimal128();
+      scratchDecimal = new HiveDecimalWritable();
     }
 
     private Aggregation getCurrentAggregationBuffer(
@@ -110,7 +125,7 @@ public class VectorUDAFSumDecimal extend
 
       DecimalColumnVector inputVector = (DecimalColumnVector)batch.
         cols[this.inputExpression.getOutputColumn()];
-      Decimal128[] vector = inputVector.vector;
+      HiveDecimalWritable[] vector = inputVector.vector;
 
       if (inputVector.noNulls) {
         if (inputVector.isRepeating) {
@@ -163,7 +178,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateNoNullsRepeatingWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       short scale,
       int batchSize) {
 
@@ -179,7 +194,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateNoNullsSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       short scale,
       int[] selection,
       int batchSize) {
@@ -196,7 +211,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateNoNullsWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       short scale,
       int batchSize) {
       for (int i=0; i < batchSize; ++i) {
@@ -211,7 +226,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       short scale,
       int batchSize,
       int[] selection,
@@ -232,7 +247,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateHasNullsRepeatingWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128 value,
+      HiveDecimalWritable value,
       short scale,
       int batchSize,
       boolean[] isNull) {
@@ -251,7 +266,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateHasNullsSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       short scale,
       int batchSize,
       int[] selection,
@@ -272,7 +287,7 @@ public class VectorUDAFSumDecimal extend
     private void iterateHasNullsWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
       int aggregateIndex,
-      Decimal128[] values,
+      HiveDecimalWritable[] values,
       short scale,
       int batchSize,
       boolean[] isNull) {
@@ -305,18 +320,34 @@ public class VectorUDAFSumDecimal extend
       }
 
       Aggregation myagg = (Aggregation)agg;
+      if (myagg.isOutOfRange) {
+        return;
+      }
 
-      Decimal128[] vector = inputVector.vector;
+      HiveDecimalWritable[] vector = inputVector.vector;
 
       if (inputVector.isRepeating) {
         if ((inputVector.noNulls) || !inputVector.isNull[0]) {
           if (myagg.isNull) {
             myagg.isNull = false;
-            myagg.sum.zeroClear();
+            myagg.sum.set(HiveDecimal.ZERO);
+          }
+          HiveDecimal value = vector[0].getHiveDecimal();
+          HiveDecimal multiple;
+          try {
+            multiple = value.multiply(HiveDecimal.create(batchSize));
+          } catch (ArithmeticException e) {  // catch on overflow
+            myagg.isOutOfRange = true;
+            return;
           }
-          scratchDecimal.update(batchSize);
-          scratchDecimal.multiplyDestructive(vector[0], inputVector.scale);
-          myagg.sum.addDestructive(scratchDecimal, inputVector.scale);
+          HiveDecimal result;
+          try {
+            result = myagg.sum.getHiveDecimal().add(multiple);
+          } catch (ArithmeticException e) {  // catch on overflow
+            myagg.isOutOfRange = true;
+            return;
+          }
+          myagg.sum.set(result);
         }
         return;
       }
@@ -337,7 +368,7 @@ public class VectorUDAFSumDecimal extend
 
     private void iterateSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         boolean[] isNull,
@@ -346,66 +377,94 @@ public class VectorUDAFSumDecimal extend
       for (int j=0; j< batchSize; ++j) {
         int i = selected[j];
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
           if (myagg.isNull) {
             myagg.isNull = false;
-            myagg.sum.zeroClear();
+            myagg.sum.set(HiveDecimal.ZERO);
+          }
+          HiveDecimal value = vector[i].getHiveDecimal();
+          HiveDecimal result;
+          try {
+            result = myagg.sum.getHiveDecimal().add(value);
+          } catch (ArithmeticException e) {  // catch on overflow
+            myagg.isOutOfRange = true;
+            return;
           }
-          myagg.sum.addDestructive(value, scale);
+          myagg.sum.set(result);
         }
       }
     }
 
     private void iterateSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         int[] selected) {
 
       if (myagg.isNull) {
-        myagg.sum.zeroClear();
+        myagg.sum.set(HiveDecimal.ZERO);
         myagg.isNull = false;
       }
 
       for (int i=0; i< batchSize; ++i) {
-        Decimal128 value = vector[selected[i]];
-        myagg.sum.addDestructive(value, scale);
+        HiveDecimal value = vector[selected[i]].getHiveDecimal();
+        HiveDecimal result;
+        try {
+          result = myagg.sum.getHiveDecimal().add(value);
+        } catch (ArithmeticException e) {  // catch on overflow
+          myagg.isOutOfRange = true;
+          return;
+        }
+        myagg.sum.set(result);
       }
     }
 
     private void iterateNoSelectionHasNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize,
         boolean[] isNull) {
 
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
-          Decimal128 value = vector[i];
           if (myagg.isNull) {
-            myagg.sum.zeroClear();
+            myagg.sum.set(HiveDecimal.ZERO);
             myagg.isNull = false;
           }
-          myagg.sum.addDestructive(value, scale);
+          HiveDecimal value = vector[i].getHiveDecimal();
+          HiveDecimal result;
+          try {
+            result = myagg.sum.getHiveDecimal().add(value);
+          } catch (ArithmeticException e) {  // catch on overflow
+            myagg.isOutOfRange = true;
+            return;
+          }
+          myagg.sum.set(result);
         }
       }
     }
 
     private void iterateNoSelectionNoNulls(
         Aggregation myagg,
-        Decimal128[] vector,
+        HiveDecimalWritable[] vector,
         short scale,
         int batchSize) {
       if (myagg.isNull) {
-        myagg.sum.zeroClear();
+        myagg.sum.set(HiveDecimal.ZERO);
         myagg.isNull = false;
       }
 
       for (int i=0;i<batchSize;++i) {
-        Decimal128 value = vector[i];
-        myagg.sum.addDestructive(value, scale);
+        HiveDecimal value = vector[i].getHiveDecimal();
+        HiveDecimal result;
+        try {
+          result = myagg.sum.getHiveDecimal().add(value);
+        } catch (ArithmeticException e) {  // catch on overflow
+          myagg.isOutOfRange = true;
+          return;
+        }
+        myagg.sum.set(result);
       }
     }
 
@@ -423,11 +482,11 @@ public class VectorUDAFSumDecimal extend
     @Override
     public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
       Aggregation myagg = (Aggregation) agg;
-      if (myagg.isNull) {
+      if (myagg.isNull || myagg.isOutOfRange) {
         return null;
       }
       else {
-        return HiveDecimal.create(myagg.sum.toBigDecimal());
+        return myagg.sum.getHiveDecimal();
       }
     }
 

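For an isRepeating batch, SUM and AVG take the same shortcut seen in the hunks above: multiply the single repeated value by batchSize instead of looping, guarding the multiply and the add separately since either step can overflow. A sketch of that fast path, again using only calls that appear in the patch (class and method names here are illustrative):

import org.apache.hadoop.hive.common.type.HiveDecimal;

class RepeatingSumSketch {
  // Returns the new running sum, or null to tell the caller to latch its
  // isOutOfRange flag (the patch latches the flag inline instead).
  static HiveDecimal addRepeated(HiveDecimal sum, HiveDecimal value, int batchSize) {
    HiveDecimal multiple;
    try {
      multiple = value.multiply(HiveDecimal.create(batchSize));
    } catch (ArithmeticException e) {
      return null;                     // overflow in the multiply
    }
    try {
      return sum.add(multiple);
    } catch (ArithmeticException e) {
      return null;                     // overflow in the add
    }
  }
}
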
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java Sat Nov  1 20:55:00 2014
@@ -329,10 +329,10 @@ public class VectorUDFAdaptor extends Ve
     } else if (outputOI instanceof WritableHiveDecimalObjectInspector) {
       DecimalColumnVector dcv = (DecimalColumnVector) colVec;
       if (value instanceof HiveDecimal) {
-        dcv.vector[i].update(((HiveDecimal) value).bigDecimalValue());
+        dcv.set(i, (HiveDecimal) value);
       } else {
         HiveDecimal hd = ((WritableHiveDecimalObjectInspector) outputOI).getPrimitiveJavaObject(value);
-        dcv.vector[i].update(hd.bigDecimalValue());
+        dcv.set(i, hd);
       }
     } else {
       throw new RuntimeException("Unhandled object type " + outputOI.getTypeName());

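The adaptor no longer mutates dcv.vector[i] in place; it goes through DecimalColumnVector.set(int, HiveDecimal), leaving normalization of the value to the column vector. A short usage sketch; the sizing arguments and the three-argument size/precision/scale constructor are assumptions here, not taken from this patch:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

public class DecimalVectorFillSketch {
  public static void main(String[] args) {
    // Assumed constructor: (size, precision, scale).
    DecimalColumnVector dcv = new DecimalColumnVector(1024, 38, 10);
    for (int i = 0; i < 3; i++) {
      dcv.set(i, HiveDecimal.create(i + 1));             // row i holds i+1
    }
    System.out.println(dcv.vector[0].getHiveDecimal());  // prints 1
  }
}
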
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Sat Nov  1 20:55:00 2014
@@ -1259,12 +1259,9 @@ class RecordReaderImpl implements Record
         if (!result.isNull[0]) {
           BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
           short scaleInData = (short) scaleStream.next();
-          result.vector[0].update(bInt, scaleInData);
-
-          // Change the scale to match the schema if the scale in data is different.
-          if (scale != scaleInData) {
-            result.vector[0].changeScaleDestructive((short) scale);
-          }
+          HiveDecimal dec = HiveDecimal.create(bInt, scaleInData);
+          dec = HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+          result.set(0, dec);
         }
       } else {
         // result vector has isNull values set, use the same to read scale vector.
@@ -1273,13 +1270,10 @@ class RecordReaderImpl implements Record
         for (int i = 0; i < batchSize; i++) {
           if (!result.isNull[i]) {
             BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
-            result.vector[i].update(bInt, (short) scratchScaleVector.vector[i]);
-
-            // Change the scale to match the schema if the scale is less than in data.
-            // (HIVE-7373) If scale is bigger, then it leaves the original trailing zeros
-            if (scale < scratchScaleVector.vector[i]) {
-              result.vector[i].changeScaleDestructive((short) scale);
-            }
+            short scaleInData = (short) scratchScaleVector.vector[i];
+            HiveDecimal dec = HiveDecimal.create(bInt, scaleInData);
+            dec = HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+            result.set(i, dec);
           }
         }
       }

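In RecordReaderImpl, the manual changeScaleDestructive() calls give way to HiveDecimalUtils.enforcePrecisionScale(), which conforms the decoded value to the schema's declared decimal(precision, scale). One behavior worth noting when building on this: enforcePrecisionScale() can return null when the value cannot be represented in the declared type. A sketch of the decode-then-conform step under that assumption (the helper class is illustrative):

import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;

class DecimalDecodeSketch {
  static HiveDecimal decode(BigInteger unscaled, short scaleInData,
      int precision, int scale) {
    HiveDecimal dec = HiveDecimal.create(unscaled, scaleInData);
    // Conform to decimal(precision, scale): may round away extra fractional
    // digits, or return null if the integer part does not fit.
    return HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
  }
}
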
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1636047&r1=1636046&r2=1636047&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Sat Nov  1 20:55:00 2014
@@ -38,7 +38,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromConcat;
@@ -632,9 +631,9 @@ public class TestVectorGroupByOperator {
         "count",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
        3L);
   }
 
@@ -645,28 +644,28 @@ public class TestVectorGroupByOperator {
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(3));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(3));
     testAggregateDecimal(
         "Decimal",
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(3),
-                new Decimal128(2),
-                new Decimal128(1)}),
-        new Decimal128(3));
+                HiveDecimal.create(3),
+                HiveDecimal.create(2),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(3));
     testAggregateDecimal(
         "Decimal",
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(2),
-                new Decimal128(3),
-                new Decimal128(1)}),
-        new Decimal128(3));
+                HiveDecimal.create(2),
+                HiveDecimal.create(3),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(3));
   }
 
   @Test
@@ -676,29 +675,29 @@ public class TestVectorGroupByOperator {
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(1));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(1));
     testAggregateDecimal(
         "Decimal",
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(3),
-                new Decimal128(2),
-                new Decimal128(1)}),
-        new Decimal128(1));
+                HiveDecimal.create(3),
+                HiveDecimal.create(2),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(1));
 
     testAggregateDecimal(
         "Decimal",
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(2),
-                new Decimal128(1),
-                new Decimal128(3)}),
-        new Decimal128(1));
+                HiveDecimal.create(2),
+                HiveDecimal.create(1),
+                HiveDecimal.create(3)}),
+        HiveDecimal.create(1));
   }
 
   @Test
@@ -708,10 +707,10 @@ public class TestVectorGroupByOperator {
        "sum",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(1+2+3));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(1+2+3));
   }
 
   @Test
@@ -722,12 +721,12 @@ public class TestVectorGroupByOperator {
         "sum",
         4,
         Arrays.asList(new Object[]{
-                new Decimal128("1234.2401", scale),
-                new Decimal128("1868.52", scale),
-                new Decimal128(0L, (short) 0),
-                new Decimal128("456.84", scale),
-                new Decimal128("121.89", scale)}),
-       new Decimal128("3681.4901", scale));
+                HiveDecimal.create("1234.2401").setScale(scale),
+                HiveDecimal.create("1868.52").setScale(scale),
+                HiveDecimal.ZERO.setScale(scale),
+                HiveDecimal.create("456.84").setScale(scale),
+                HiveDecimal.create("121.89").setScale(scale)}),
+       HiveDecimal.create("3681.4901").setScale(scale));
   }
 
   @Test
@@ -737,9 +736,9 @@ public class TestVectorGroupByOperator {
         "avg",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
        HiveDecimal.create((1+2+3)/3));
   }
 
@@ -750,9 +749,9 @@ public class TestVectorGroupByOperator {
         "avg",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(-1),
-                new Decimal128(-2),
-                new Decimal128(-3)}),
+                HiveDecimal.create(-1),
+                HiveDecimal.create(-2),
+                HiveDecimal.create(-3)}),
         HiveDecimal.create((-1-2-3)/3));
   }
 
@@ -763,10 +762,10 @@ public class TestVectorGroupByOperator {
         "variance",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) 30);
   }
 
@@ -777,10 +776,10 @@ public class TestVectorGroupByOperator {
         "var_samp",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) 40);
   }
 
@@ -791,10 +790,10 @@ public class TestVectorGroupByOperator {
         "stddev_pop",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) Math.sqrt(30));
   }
 
@@ -805,10 +804,10 @@ public class TestVectorGroupByOperator {
         "stddev_samp",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) Math.sqrt(40));
   }
 
@@ -820,8 +819,8 @@ public class TestVectorGroupByOperator {
             2,
             new String[] {"decimal(38,0)", "bigint"},
             Arrays.asList(new Object[]{
-                    new Decimal128(1),null,
-                    new Decimal128(1), null}),
+                    HiveDecimal.create(1), null,
+                    HiveDecimal.create(1), null}),
             Arrays.asList(new Object[]{13L,null,7L, 19L})),
         buildHashMap(HiveDecimal.create(1), 20L, null, 19L));
   }
@@ -2095,12 +2094,12 @@ public class TestVectorGroupByOperator {
       } else if (arr[0] instanceof HiveDecimalWritable) {
         HiveDecimalWritable hdw = (HiveDecimalWritable) arr[0];
         HiveDecimal hd = hdw.getHiveDecimal();
-        Decimal128 d128 = (Decimal128)expected;
-        assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
+        HiveDecimal expectedDec = (HiveDecimal)expected;
+        assertEquals (key, expectedDec, hd);
       } else if (arr[0] instanceof HiveDecimal) {
           HiveDecimal hd = (HiveDecimal) arr[0];
-          Decimal128 d128 = (Decimal128)expected;
-          assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
+          HiveDecimal expectedDec = (HiveDecimal)expected;
+          assertEquals (key, expectedDec, hd);
       } else {
         Assert.fail("Unsupported result type: " + arr[0].getClass().getName());
       }