Posted to commits@hive.apache.org by jd...@apache.org on 2014/11/07 22:58:58 UTC

svn commit: r1637469 [1/3] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/ ql/src/test/org/apache/hadoop/hive/ql/io...

Author: jdere
Date: Fri Nov  7 21:58:56 2014
New Revision: 1637469

URL: http://svn.apache.org/r1637469
Log:
HIVE-8745: Joins on decimal keys return different results depending on whether they are run as a reduce join or a map join
- This reverts HIVE-7373
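
For reference, a minimal sketch (not part of this patch) of the normalized comparison behavior the revert restores, using the HiveDecimal API as exercised in the test updates below; the class name is illustrative only:

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalJoinKeyCheck {
      public static void main(String[] args) {
        // After the revert, create() trims trailing zeros, so 3.14 and 3.140
        // normalize to the same value and join consistently in both the
        // reduce-join and map-join code paths.
        HiveDecimal a = HiveDecimal.create("3.14");
        HiveDecimal b = HiveDecimal.create("3.140");
        System.out.println(a + " " + b);                    // 3.14 3.14
        System.out.println(a.compareTo(b) == 0);            // true
        System.out.println(a.hashCode() == b.hashCode());   // true
      }
    }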

Added:
    hive/trunk/ql/src/test/queries/clientpositive/decimal_join2.q
    hive/trunk/ql/src/test/results/clientpositive/decimal_join2.q.out
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
    hive/trunk/ql/src/test/results/clientpositive/alter_partition_change_col.q.out
    hive/trunk/ql/src/test/results/clientpositive/avro_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out
    hive/trunk/ql/src/test/results/clientpositive/char_pad_convert.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_precision.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out
    hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
    hive/trunk/ql/src/test/results/clientpositive/parquet_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/parquet_decimal1.q.out
    hive/trunk/ql/src/test/results/clientpositive/serde_regex.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/mapjoin_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/update_all_types.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_between_in.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_data_types.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/vector_reduce_groupby_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_case.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_when.q.out
    hive/trunk/ql/src/test/results/clientpositive/update_all_types.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_between_in.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_data_types.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_6.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_precision.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_decimal_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/windowing_decimal.q.out
    hive/trunk/ql/src/test/results/clientpositive/windowing_navfn.q.out
    hive/trunk/ql/src/test/results/clientpositive/windowing_rank.q.out
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java Fri Nov  7 21:58:56 2014
@@ -103,13 +103,6 @@ public final class Decimal128 extends Nu
   private short scale;
 
   /**
-   * This is the actual scale detected from the value passed to this Decimal128.
-   * The value is always equals or less than #scale. It is used to return the correct
-   * decimal string from {@link #getHiveDecimalString()}.
-   */
-  private short actualScale;
-
-  /**
    * -1 means negative, 0 means zero, 1 means positive.
    *
    * @serial
@@ -134,7 +127,6 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128();
     this.scale = 0;
     this.signum = 0;
-    this.actualScale = 0;
   }
 
   /**
@@ -147,7 +139,6 @@ public final class Decimal128 extends Nu
     this.unscaledValue = new UnsignedInt128(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
-    this.actualScale = o.actualScale;
   }
 
   /**
@@ -187,7 +178,6 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.unscaledValue = new UnsignedInt128(unscaledVal);
     this.scale = scale;
-    this.actualScale = scale;
     if (unscaledValue.isZero()) {
       this.signum = 0;
     } else {
@@ -274,7 +264,6 @@ public final class Decimal128 extends Nu
     this.unscaledValue.update(o.unscaledValue);
     this.scale = o.scale;
     this.signum = o.signum;
-    this.actualScale = o.actualScale;
     return this;
   }
 
@@ -303,7 +292,7 @@ public final class Decimal128 extends Nu
 
   /**
    * Update the value of this object with the given {@code long} with the given
-   * scale.
+   * scal.
    *
    * @param val
    *          {@code long} value to be set to {@code Decimal128}.
@@ -325,8 +314,6 @@ public final class Decimal128 extends Nu
     if (scale != 0) {
       changeScaleDestructive(scale);
     }
-    // set actualScale to 0 because there is no fractional digits on integer values
-    this.actualScale = 0;
     return this;
   }
 
@@ -354,11 +341,6 @@ public final class Decimal128 extends Nu
     checkScaleRange(scale);
     this.scale = scale;
 
-    // Obtains the scale of the double value to keep a record of the original
-    // scale. This will be used to print the HiveDecimal string with the
-    // correct value scale.
-    this.actualScale = (short) BigDecimal.valueOf(val).scale();
-
     // Translate the double into sign, exponent and significand, according
     // to the formulae in JLS, Section 20.10.22.
     long valBits = Double.doubleToLongBits(val);
@@ -382,10 +364,6 @@ public final class Decimal128 extends Nu
       exponent++;
     }
 
-    // Calculate the real number of fractional digits from the double value
-    this.actualScale -= (exponent > 0) ? exponent : 0;
-    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
-
     // so far same as java.math.BigDecimal, but the scaling below is
     // specific to ANSI SQL Numeric.
 
@@ -448,7 +426,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update(IntBuffer buf, int precision) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(buf, precision);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -465,7 +442,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -482,7 +458,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -499,7 +474,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -516,7 +490,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(IntBuffer buf) {
     int scaleAndSignum = buf.get();
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(buf);
     assert ((signum == 0) == unscaledValue.isZero());
@@ -537,7 +510,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update(int[] array, int offset, int precision) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update(array, offset + 1, precision);
     return this;
@@ -555,7 +527,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update128(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update128(array, offset + 1);
     return this;
@@ -573,7 +544,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update96(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update96(array, offset + 1);
     return this;
@@ -591,7 +561,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update64(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update64(array, offset + 1);
     return this;
@@ -609,7 +578,6 @@ public final class Decimal128 extends Nu
   public Decimal128 update32(int[] array, int offset) {
     int scaleAndSignum = array[offset];
     this.scale = (short) (scaleAndSignum >> 16);
-    this.actualScale = this.scale;
     this.signum = (byte) (scaleAndSignum & 0xFF);
     this.unscaledValue.update32(array, offset + 1);
     return this;
@@ -632,6 +600,7 @@ public final class Decimal128 extends Nu
    * @param scale
    */
   public Decimal128 update(BigInteger bigInt, short scale) {
+    this.scale = scale;
     this.signum = (byte) bigInt.compareTo(BigInteger.ZERO);
     if (signum == 0) {
       update(0);
@@ -640,9 +609,6 @@ public final class Decimal128 extends Nu
     } else {
       unscaledValue.update(bigInt);
     }
-    this.scale = scale;
-    this.actualScale = scale;
-
     return this;
   }
 
@@ -765,9 +731,6 @@ public final class Decimal128 extends Nu
       this.unscaledValue.addDestructive(accumulated);
     }
 
-    this.actualScale = (short) (fractionalDigits - exponent);
-    this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale;
-
     int scaleAdjust = scale - fractionalDigits + exponent;
     if (scaleAdjust > 0) {
       this.unscaledValue.scaleUpTenDestructive((short) scaleAdjust);
@@ -961,7 +924,6 @@ public final class Decimal128 extends Nu
       this.unscaledValue.scaleUpTenDestructive((short) -scaleDown);
     }
     this.scale = scale;
-    this.actualScale = scale;
 
     this.unscaledValue.throwIfExceedsTenToThirtyEight();
   }
@@ -1163,7 +1125,6 @@ public final class Decimal128 extends Nu
     if (this.signum == 0 || right.signum == 0) {
       this.zeroClear();
       this.scale = newScale;
-      this.actualScale = newScale;
       return;
     }
 
@@ -1193,7 +1154,6 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
-    this.actualScale = newScale;
     this.signum = (byte) (this.signum * right.signum);
     if (this.unscaledValue.isZero()) {
       this.signum = 0; // because of scaling down, this could happen
@@ -1284,7 +1244,6 @@ public final class Decimal128 extends Nu
     }
     if (this.signum == 0) {
       this.scale = newScale;
-      this.actualScale = newScale;
       remainder.update(this);
       return;
     }
@@ -1312,7 +1271,6 @@ public final class Decimal128 extends Nu
     }
 
     this.scale = newScale;
-    this.actualScale = newScale;
     this.signum = (byte) (this.unscaledValue.isZero() ? 0
         : (this.signum * right.signum));
     remainder.scale = scale;
@@ -1773,13 +1731,17 @@ public final class Decimal128 extends Nu
   private int [] tmpArray = new int[2];
 
   /**
-   * Returns the string representation of this value. It returns the original
-   * {@code actualScale} fractional part when this value was created. However,
+   * Returns the string representation of this value. It discards the trailing zeros
+   * in the fractional part to match the HiveDecimal's string representation. However,
    * don't use this string representation for the reconstruction of the object.
    *
    * @return string representation of this value
    */
   public String getHiveDecimalString() {
+    if (this.signum == 0) {
+      return "0";
+    }
+
     StringBuilder buf = new StringBuilder(50);
     if (this.signum < 0) {
       buf.append('-');
@@ -1790,40 +1752,32 @@ public final class Decimal128 extends Nu
     int trailingZeros = tmpArray[1];
     int numIntegerDigits = unscaledLength - this.scale;
     if (numIntegerDigits > 0) {
+
       // write out integer part first
       // then write out fractional part
       for (int i=0; i < numIntegerDigits; i++) {
         buf.append(unscaled[i]);
       }
 
-      if (this.actualScale > 0) {
+      if (this.scale > trailingZeros) {
         buf.append('.');
-
-        if (trailingZeros > this.actualScale) {
-          for (int i=0; i < (trailingZeros - this.scale); i++) {
-            buf.append("0");
-          }
-        }
-
-        for (int i = numIntegerDigits; i < (numIntegerDigits + this.actualScale); i++) {
+        for (int i = numIntegerDigits; i < (unscaledLength - trailingZeros); i++) {
           buf.append(unscaled[i]);
         }
       }
     } else {
+
       // no integer part
       buf.append('0');
 
-      if (this.actualScale > 0) {
+      if (this.scale > trailingZeros) {
+
         // fractional part has, starting with zeros
         buf.append('.');
-
-        if (this.actualScale > trailingZeros) {
-          for (int i = unscaledLength; i < this.scale; ++i) {
-            buf.append('0');
-          }
+        for (int i = unscaledLength; i < this.scale; ++i) {
+          buf.append('0');
         }
-
-        for (int i = 0; i < (numIntegerDigits + this.actualScale); i++) {
+        for (int i = 0; i < (unscaledLength - trailingZeros); i++) {
           buf.append(unscaled[i]);
         }
       }
@@ -1882,10 +1836,9 @@ public final class Decimal128 extends Nu
 
   @Override
   public String toString() {
-    return toFormalString() + "(Decimal128: scale=" + scale + ", actualScale="
-        + this.actualScale + ", signum=" + signum + ", BigDecimal.toString="
-        + toBigDecimal().toString() + ", unscaledValue=[" + unscaledValue.toString()
-        + "])";
+    return toFormalString() + "(Decimal128: scale=" + scale + ", signum="
+        + signum + ", BigDecimal.toString=" + toBigDecimal().toString()
+        + ", unscaledValue=[" + unscaledValue.toString() + "])";
   }
 
   /**
@@ -2003,7 +1956,6 @@ public final class Decimal128 extends Nu
    */
   public Decimal128 fastUpdateFromInternalStorage(byte[] internalStorage, short scale) {
     this.scale = scale;
-    this.actualScale = scale;
     this.signum = this.unscaledValue.fastUpdateFromInternalStorage(internalStorage);
 
     return this;
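
As a hedged illustration (not part of the patch), the getHiveDecimalString() behavior after removing actualScale, matching the TestDecimal128 expectations further down; the class name is made up for the example:

    import org.apache.hadoop.hive.common.type.Decimal128;

    public class Decimal128StringCheck {
      public static void main(String[] args) {
        // Trailing zeros in the fractional part are dropped from the string form.
        Decimal128 d1 = new Decimal128("0.00923076000", (short) 15);
        System.out.println(d1.getHiveDecimalString());   // 0.00923076

        // A double with no real fractional digits prints as a plain integer.
        Decimal128 d2 = new Decimal128(27.000, (short) 3);
        System.out.println(d2.getHiveDecimalString());   // 27
      }
    }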

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Fri Nov  7 21:58:56 2014
@@ -30,6 +30,7 @@ import java.math.RoundingMode;
 public class HiveDecimal implements Comparable<HiveDecimal> {
   public static final int MAX_PRECISION = 38;
   public static final int MAX_SCALE = 38;
+
   /**
    * Default precision/scale when user doesn't specify in the column metadata, such as
    * decimal and decimal(8).
@@ -112,7 +113,7 @@ public class HiveDecimal implements Comp
 
   @Override
   public int hashCode() {
-    return trim(bd).hashCode();
+    return bd.hashCode();
   }
 
   @Override
@@ -168,7 +169,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal multiply(HiveDecimal dec) {
-    return create(bd.multiply(dec.bd), true);
+    return create(bd.multiply(dec.bd), false);
   }
 
   public BigInteger unscaledValue() {
@@ -201,7 +202,7 @@ public class HiveDecimal implements Comp
   }
 
   public HiveDecimal divide(HiveDecimal dec) {
-    return create(trim(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP)), true);
+    return create(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP), true);
   }
 
   /**
@@ -231,6 +232,8 @@ public class HiveDecimal implements Comp
       return null;
     }
 
+    bd = trim(bd);
+
     int intDigits = bd.precision() - bd.scale();
 
     if (intDigits > MAX_PRECISION) {
@@ -241,6 +244,8 @@ public class HiveDecimal implements Comp
     if (bd.scale() > maxScale ) {
       if (allowRounding) {
         bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
+        // Trimming is again necessary, because rounding may introduce new trailing 0's.
+        bd = trim(bd);
       } else {
         bd = null;
       }
@@ -254,6 +259,8 @@ public class HiveDecimal implements Comp
       return null;
     }
 
+    bd = trim(bd);
+
     if (bd.scale() > maxScale) {
       bd = bd.setScale(maxScale, RoundingMode.HALF_UP);
     }
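
A minimal sketch (not part of the patch) of the trimming that create() and the arithmetic methods perform again after this revert, mirroring the TestHiveDecimal expectations below; the class name is illustrative:

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class HiveDecimalTrimCheck {
      public static void main(String[] args) {
        // create() strips leading and trailing zeros, shrinking precision/scale.
        HiveDecimal d = HiveDecimal.create("005.34000");
        System.out.println(d.precision() + " " + d.scale());   // 3 2

        // Results of arithmetic are normalized the same way.
        HiveDecimal p = HiveDecimal.create("3.140").multiply(HiveDecimal.create("1.00"));
        System.out.println(p);                                  // 3.14
      }
    }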

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java Fri Nov  7 21:58:56 2014
@@ -811,7 +811,7 @@ public class TestDecimal128 {
     assertEquals("0.00923076923", d2.getHiveDecimalString());
 
     Decimal128 d3 = new Decimal128("0.00923076000", (short) 15);
-    assertEquals("0.00923076000", d3.getHiveDecimalString());
+    assertEquals("0.00923076", d3.getHiveDecimalString());
 
     Decimal128 d4 = new Decimal128("4294967296.01", (short) 15);
     assertEquals("4294967296.01", d4.getHiveDecimalString());
@@ -849,37 +849,15 @@ public class TestDecimal128 {
     d11.update(hd6.bigDecimalValue());
     assertEquals(hd6.toString(), d11.getHiveDecimalString());
 
-    // The trailing zeros from a double value are trimmed automatically
-    // by the double data type
     Decimal128 d12 = new Decimal128(27.000, (short)3);
-    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.0"));
+    HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000"));
     assertEquals(hd7.toString(), d12.getHiveDecimalString());
-    assertEquals("27.0", d12.getHiveDecimalString());
+    assertEquals("27", d12.getHiveDecimalString());
 
     Decimal128 d13 = new Decimal128(1234123000, (short)3);
     HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
     assertEquals(hd8.toString(), d13.getHiveDecimalString());
     assertEquals("1234123000", d13.getHiveDecimalString());
-
-    Decimal128 d14 = new Decimal128(1.33e4, (short)10);
-    HiveDecimal hd9 = HiveDecimal.create(new BigDecimal("1.33e4"));
-    assertEquals(hd9.toString(), d14.getHiveDecimalString());
-    assertEquals("13300", d14.getHiveDecimalString());
-
-    Decimal128 d15 = new Decimal128(1.33e-4, (short)10);
-    HiveDecimal hd10 = HiveDecimal.create(new BigDecimal("1.33e-4"));
-    assertEquals(hd10.toString(), d15.getHiveDecimalString());
-    assertEquals("0.000133", d15.getHiveDecimalString());
-
-    Decimal128 d16 = new Decimal128("1.33e4", (short)10);
-    HiveDecimal hd11 = HiveDecimal.create(new BigDecimal("1.33e4"));
-    assertEquals(hd11.toString(), d16.getHiveDecimalString());
-    assertEquals("13300", d16.getHiveDecimalString());
-
-    Decimal128 d17 = new Decimal128("1.33e-4", (short)10);
-    HiveDecimal hd12 = HiveDecimal.create(new BigDecimal("1.33e-4"));
-    assertEquals(hd12.toString(), d17.getHiveDecimalString());
-    assertEquals("0.000133", d17.getHiveDecimalString());
   }
 
   @Test

Modified: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java (original)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java Fri Nov  7 21:58:56 2014
@@ -57,35 +57,27 @@ public class TestHiveDecimal {
     Assert.assertEquals("-1786135888657847525803324040144343378.1", dec.toString());
 
     dec = HiveDecimal.create("005.34000");
-    Assert.assertEquals(dec.precision(), 6);
-    Assert.assertEquals(dec.scale(), 5);
+    Assert.assertEquals(dec.precision(), 3);
+    Assert.assertEquals(dec.scale(), 2);
 
     dec = HiveDecimal.create("178613588865784752580332404014434337809799306448796128931113691624");
     Assert.assertNull(dec);
 
-    // Leaving trailing zeros
-    Assert.assertEquals("0.0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0"), 2, 1).toString());
-    Assert.assertEquals("0.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.00"), 3, 2).toString());
-    Assert.assertEquals("0.0000", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0000"), 10, 4).toString());
-    Assert.assertEquals("100.00000", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00000"), 15, 5).toString());
-    Assert.assertEquals("100.00", HiveDecimal.enforcePrecisionScale(new BigDecimal("100.00"), 15, 5).toString());
-
-    // Rounding numbers
-    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.012"), 3, 2).toString());
-    Assert.assertEquals("0.02", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.015"), 3, 2).toString());
-    Assert.assertEquals("0.01", HiveDecimal.enforcePrecisionScale(new BigDecimal("0.0145"), 3, 2).toString());
-
     // Rounding numbers that increase int digits
     Assert.assertEquals("10",
         HiveDecimal.enforcePrecisionScale(new BigDecimal("9.5"), 2, 0).toString());
     Assert.assertNull(HiveDecimal.enforcePrecisionScale(new BigDecimal("9.5"), 1, 0));
     Assert.assertEquals("9",
         HiveDecimal.enforcePrecisionScale(new BigDecimal("9.4"), 1, 0).toString());
+  }
 
-    // Integers with no scale values are not modified (zeros are not null)
-    Assert.assertEquals("0", HiveDecimal.enforcePrecisionScale(new BigDecimal("0"), 1, 0).toString());
-    Assert.assertEquals("30", HiveDecimal.enforcePrecisionScale(new BigDecimal("30"), 2, 0).toString());
-    Assert.assertEquals("5", HiveDecimal.enforcePrecisionScale(new BigDecimal("5"), 3, 2).toString());
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testTrailingZeroRemovalAfterEnforcement() {
+    String decStr = "8.090000000000000000000000000000000000000123456";
+    HiveDecimal dec = HiveDecimal.create(decStr);
+    Assert.assertEquals("8.09", dec.toString());
   }
 
   @Test
@@ -94,7 +86,7 @@ public class TestHiveDecimal {
   public void testMultiply() {
     HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
     HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
-    Assert.assertNotNull(dec1.multiply(dec2));
+    Assert.assertNull(dec1.multiply(dec2));
 
     dec1 = HiveDecimal.create("178613588865784752580323232232323444.4");
     dec2 = HiveDecimal.create("178613588865784752580302323232.3");
@@ -106,11 +98,11 @@ public class TestHiveDecimal {
 
     dec1 = HiveDecimal.create("3.140");
     dec2 = HiveDecimal.create("1.00");
-    Assert.assertEquals("3.14000", dec1.multiply(dec2).toString());
+    Assert.assertEquals("3.14", dec1.multiply(dec2).toString());
 
     dec1 = HiveDecimal.create("43.010");
     dec2 = HiveDecimal.create("2");
-    Assert.assertEquals("86.020", dec1.multiply(dec2).toString());
+    Assert.assertEquals("86.02", dec1.multiply(dec2).toString());
   }
 
   @Test
@@ -125,7 +117,7 @@ public class TestHiveDecimal {
     Assert.assertNull(dec1);
 
     dec1 = HiveDecimal.create("3.140");
-    Assert.assertEquals("9.859600", dec1.pow(2).toString());
+    Assert.assertEquals("9.8596", dec1.pow(2).toString());
   }
 
   @Test
@@ -155,7 +147,7 @@ public class TestHiveDecimal {
 
     dec1 = HiveDecimal.create("3.140");
     dec2 = HiveDecimal.create("1.00");
-    Assert.assertEquals("4.140", dec1.add(dec2).toString());
+    Assert.assertEquals("4.14", dec1.add(dec2).toString());
   }
 
 
@@ -165,7 +157,7 @@ public class TestHiveDecimal {
   public void testSubtract() {
       HiveDecimal dec1 = HiveDecimal.create("3.140");
       HiveDecimal dec2 = HiveDecimal.create("1.00");
-      Assert.assertEquals("2.140", dec1.subtract(dec2).toString());
+      Assert.assertEquals("2.14", dec1.subtract(dec2).toString());
   }
 
   @Test

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java Fri Nov  7 21:58:56 2014
@@ -38,8 +38,9 @@ public class TestDecimalUtil {
     DecimalUtil.floor(0, d1, dcv);
     Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
     HiveDecimal d2 = HiveDecimal.create("23.00000");
-    Assert.assertEquals(5, d2.scale());
+    Assert.assertEquals(0, d2.scale());
     HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.floor(0, d2, dcv);
     Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -50,19 +51,19 @@ public class TestDecimalUtil {
     Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d4 = HiveDecimal.create("-17.00000");
-    Assert.assertEquals(5, d4.scale());
+    Assert.assertEquals(0, d4.scale());
     HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.floor(0, d4, dcv);
     Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d5 = HiveDecimal.create("-0.30000");
-    Assert.assertEquals(5, d5.scale());
+    Assert.assertEquals(1, d5.scale());
     HiveDecimal expected5 = HiveDecimal.create("-1");
     DecimalUtil.floor(0, d5, dcv);
     Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d6 = HiveDecimal.create("0.30000");
-    Assert.assertEquals(5, d6.scale());
+    Assert.assertEquals(1, d6.scale());
     HiveDecimal expected6 = HiveDecimal.create("0");
     DecimalUtil.floor(0, d6, dcv);
     Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -76,8 +77,9 @@ public class TestDecimalUtil {
     DecimalUtil.ceiling(0, d1, dcv);
     Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
     HiveDecimal d2 = HiveDecimal.create("23.00000");
-    Assert.assertEquals(5, d2.scale());
+    Assert.assertEquals(0, d2.scale());
     HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.ceiling(0, d2, dcv);
     Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -88,19 +90,19 @@ public class TestDecimalUtil {
     Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d4 = HiveDecimal.create("-17.00000");
-    Assert.assertEquals(5, d4.scale());
+    Assert.assertEquals(0, d4.scale());
     HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.ceiling(0, d4, dcv);
     Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d5 = HiveDecimal.create("-0.30000");
-    Assert.assertEquals(5, d5.scale());
+    Assert.assertEquals(1, d5.scale());
     HiveDecimal expected5 = HiveDecimal.create("0");
     DecimalUtil.ceiling(0, d5, dcv);
     Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d6 = HiveDecimal.create("0.30000");
-    Assert.assertEquals(5, d6.scale());
+    Assert.assertEquals(1, d6.scale());
     HiveDecimal expected6 = HiveDecimal.create("1");
     DecimalUtil.ceiling(0, d6, dcv);
     Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -127,8 +129,9 @@ public class TestDecimalUtil {
     DecimalUtil.round(0, d1, dcv);
     Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
     HiveDecimal d2 = HiveDecimal.create("23.00000");
-    Assert.assertEquals(5, d2.scale());
+    Assert.assertEquals(0, d2.scale());
     HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.round(0, d2, dcv);
     Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -139,7 +142,7 @@ public class TestDecimalUtil {
     Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d4 = HiveDecimal.create("-17.00000");
-    Assert.assertEquals(5, d4.scale());
+    Assert.assertEquals(0, d4.scale());
     HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.round(0, d4, dcv);
     Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -163,8 +166,9 @@ public class TestDecimalUtil {
     DecimalUtil.round(0, d1, dcv);
     Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
     HiveDecimal d2 = HiveDecimal.create("23.56700");
-    Assert.assertEquals(5, d2.scale());
+    Assert.assertEquals(3, d2.scale());
     HiveDecimal expected2 = HiveDecimal.create("23.567");
     DecimalUtil.round(0, d2, dcv);
     Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -175,7 +179,7 @@ public class TestDecimalUtil {
     Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
     HiveDecimal d4 = HiveDecimal.create("-17.23400");
-    Assert.assertEquals(5, d4.scale());
+    Assert.assertEquals(3, d4.scale());
     HiveDecimal expected4 = HiveDecimal.create("-17.234");
     DecimalUtil.round(0, d4, dcv);
     Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -204,8 +208,9 @@ public class TestDecimalUtil {
     DecimalUtil.negate(0, d2, dcv);
     Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
     HiveDecimal d3 = HiveDecimal.create("0.00000");
-    Assert.assertEquals(5, d3.scale());
+    Assert.assertEquals(0, d3.scale());
     HiveDecimal expected3 = HiveDecimal.create("0");
     DecimalUtil.negate(0, d3, dcv);
     Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
@@ -223,7 +228,7 @@ public class TestDecimalUtil {
     Assert.assertEquals(-1, lcv.vector[0]);
 
     HiveDecimal d3 = HiveDecimal.create("0.00000");
-    Assert.assertEquals(5, d3.scale());
+    Assert.assertEquals(0, d3.scale());
     d3.setScale(5);
     DecimalUtil.sign(0, d3, lcv);
     Assert.assertEquals(0, lcv.vector[0]);

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java Fri Nov  7 21:58:56 2014
@@ -323,18 +323,19 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     BytesColumnVector r = (BytesColumnVector) b.cols[1];
 
-    byte[] v = toBytes("1.10");
+    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
+    byte[] v = toBytes("1.1");
     assertTrue(((Integer) v.length).toString() + " " + r.length[0], v.length == r.length[0]);
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[0], r.start[0], r.length[0]));
 
-    v = toBytes("-2.20");
+    v = toBytes("-2.2");
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[1], r.start[1], r.length[1]));
 
-    v = toBytes("9999999999999999.00");
+    v = toBytes("9999999999999999");
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[2], r.start[2], r.length[2]));

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java Fri Nov  7 21:58:56 2014
@@ -2830,7 +2830,7 @@ public class TestSearchArgumentImpl {
             .build();
     assertEquals("leaf-0 = (LESS_THAN x 1970-01-11)\n" +
         "leaf-1 = (LESS_THAN_EQUALS y hi)\n" +
-        "leaf-2 = (EQUALS z 1.0)\n" +
+        "leaf-2 = (EQUALS z 1)\n" +
         "expr = (and leaf-0 leaf-1 leaf-2)", sarg.toString());
 
     sarg = SearchArgumentFactory.newBuilder()

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java Fri Nov  7 21:58:56 2014
@@ -187,7 +187,7 @@ public class TestGenericUDFOPDivide exte
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11, 7), oi.getTypeInfo());
     HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
-    Assert.assertEquals(HiveDecimal.create("0.0617100"), res.getHiveDecimal());
+    Assert.assertEquals(HiveDecimal.create("0.06171"), res.getHiveDecimal());
   }
 
   @Test

Added: hive/trunk/ql/src/test/queries/clientpositive/decimal_join2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/decimal_join2.q?rev=1637469&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/decimal_join2.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/decimal_join2.q Fri Nov  7 21:58:56 2014
@@ -0,0 +1,26 @@
+DROP TABLE IF EXISTS DECIMAL_3_txt;
+DROP TABLE IF EXISTS DECIMAL_3;
+
+CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt;
+
+CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt;
+
+set hive.auto.convert.join=false;
+EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value;
+
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value;
+
+set hive.auto.convert.join=true;
+EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value;
+
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value;
+
+DROP TABLE DECIMAL_3_txt;
+DROP TABLE DECIMAL_3;

Modified: hive/trunk/ql/src/test/results/clientpositive/alter_partition_change_col.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_partition_change_col.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_partition_change_col.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_partition_change_col.q.out Fri Nov  7 21:58:56 2014
@@ -280,7 +280,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
 #### A masked pattern was here ####
-Beck	0.0	abc	123
+Beck	0	abc	123
 Beck	77.341	abc	123
 Beck	79.9	abc	123
 Cluck	5.96	abc	123
@@ -288,7 +288,7 @@ Mary	33.33	abc	123
 Mary	4.329	abc	123
 Snow	55.71	abc	123
 Tom	-12.25	abc	123
-Tom	19.00	abc	123
+Tom	19	abc	123
 Tom	234.79	abc	123
 PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
@@ -347,7 +347,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
 #### A masked pattern was here ####
-Beck	0.0	abc	123
+Beck	0	abc	123
 Beck	77.341	abc	123
 Beck	79.9	abc	123
 Cluck	5.96	abc	123
@@ -355,7 +355,7 @@ Mary	33.33	abc	123
 Mary	4.329	abc	123
 Snow	55.71	abc	123
 Tom	-12.25	abc	123
-Tom	19.00	abc	123
+Tom	19	abc	123
 Tom	234.79	abc	123
 PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
@@ -367,7 +367,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
 Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
 Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
 Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
@@ -375,7 +375,7 @@ Mary	33.33	__HIVE_DEFAULT_PARTITION__	12
 Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
 Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
 Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
-Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
 Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Try out replace columns
 alter table alter_partition_change_col1 partition (p1='abc', p2='123') replace columns (c1 string)
@@ -449,7 +449,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
 Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
 Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
 Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
@@ -457,7 +457,7 @@ Mary	33.33	__HIVE_DEFAULT_PARTITION__	12
 Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
 Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
 Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
-Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
 Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: alter table alter_partition_change_col1 replace columns (c1 string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
@@ -593,7 +593,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
 Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
 Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
 Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
@@ -601,7 +601,7 @@ Mary	33.33	__HIVE_DEFAULT_PARTITION__	12
 Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
 Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
 Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
-Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
 Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc', p2='123') add columns (c2 decimal(14,4))
 PREHOOK: type: ALTERTABLE_ADDCOLS
@@ -638,7 +638,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
 #### A masked pattern was here ####
-Beck	0.0	abc	123
+Beck	0	abc	123
 Beck	77.341	abc	123
 Beck	79.9	abc	123
 Cluck	5.96	abc	123
@@ -646,7 +646,7 @@ Mary	33.33	abc	123
 Mary	4.329	abc	123
 Snow	55.71	abc	123
 Tom	-12.25	abc	123
-Tom	19.00	abc	123
+Tom	19	abc	123
 Tom	234.79	abc	123
 PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
@@ -658,7 +658,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
 Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
 Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
 Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
@@ -666,7 +666,7 @@ Mary	33.33	__HIVE_DEFAULT_PARTITION__	12
 Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
 Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
 Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
-Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
 Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Try changing column for all partitions at once
 alter table alter_partition_change_col1 partition (p1, p2='123') change column c2 c2 decimal(10,0)

Modified: hive/trunk/ql/src/test/results/clientpositive/avro_decimal.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_decimal.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_decimal.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_decimal.q.out Fri Nov  7 21:58:56 2014
@@ -106,9 +106,9 @@ Mary	4.33
 Cluck	5.96
 Tom	-12.25
 Mary	33.33
-Tom	19.00
-Beck	0.00
-Beck	79.90
+Tom	19
+Beck	0
+Beck	79.9
 PREHOOK: query: DROP TABLE IF EXISTS avro_dec1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE IF EXISTS avro_dec1
@@ -175,10 +175,10 @@ POSTHOOK: Input: default@avro_dec1
 77.3
 55.7
 4.3
-6.0
+6
 12.3
 33.3
-19.0
+19
 3.2
 79.9
 PREHOOK: query: DROP TABLE dec

Modified: hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out Fri Nov  7 21:58:56 2014
@@ -92,9 +92,9 @@ Mary	4.33
 Cluck	5.96
 Tom	-12.25
 Mary	33.33
-Tom	19.00
-Beck	0.00
-Beck	79.90
+Tom	19
+Beck	0
+Beck	79.9
 PREHOOK: query: DROP TABLE IF EXISTS avro_dec1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE IF EXISTS avro_dec1
@@ -143,10 +143,10 @@ POSTHOOK: Input: default@avro_dec1
 77.3
 55.7
 4.3
-6.0
+6
 12.3
 33.3
-19.0
+19
 3.2
 79.9
 PREHOOK: query: DROP TABLE dec

Modified: hive/trunk/ql/src/test/results/clientpositive/char_pad_convert.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/char_pad_convert.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/char_pad_convert.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/char_pad_convert.q.out Fri Nov  7 21:58:56 2014
@@ -144,7 +144,7 @@ select lpad(f, 4, ' '),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k
 #### A masked pattern was here ####
-74.7	42	zzzzzTRUE	20	dd45.40	yard du
+74.7	42	zzzzzTRUE	20	ddd45.4	yard du
 26.4	37	zzzzzTRUE	20	dd29.62	history
 96.9	18	zzzzFALSE	20	dd27.32	history
 13.0	34	zzzzFALSE	20	dd23.91	topolog
@@ -190,7 +190,7 @@ POSTHOOK: query: select rpad(f, 4, ' '),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k
 #### A masked pattern was here ####
-74.7	42	TRUEzzzzz	20	45.40dd	yard du
+74.7	42	TRUEzzzzz	20	45.4ddd	yard du
 26.4	37	TRUEzzzzz	20	29.62dd	history
 96.9	18	FALSEzzzz	20	27.32dd	history
 13.0	34	FALSEzzzz	20	23.91dd	topolog

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_2.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_2.q.out Fri Nov  7 21:58:56 2014
@@ -264,7 +264,7 @@ POSTHOOK: query: select cast(0.999999999
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2
 #### A masked pattern was here ####
-1.0
+1
 PREHOOK: query: select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_2

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_3.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_3.q.out Fri Nov  7 21:58:56 2014
@@ -33,7 +33,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
 NULL	0
--1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890
 -4400	4400
 -1255.49	-1255
 -1.122	-11
@@ -42,7 +42,7 @@ NULL	0
 -0.333	0
 -0.33	0
 -0.3	0
-0.000000000000000000	0
+0	0
 0	0
 0	0
 0.01	0
@@ -53,8 +53,8 @@ NULL	0
 0.33	0
 0.333	0
 1	1
-1.0	1
-1.000000000000000000	1
+1	1
+1	1
 1.12	1
 1.122	1
 2	2
@@ -62,14 +62,14 @@ NULL	0
 3.14	3
 3.14	3
 3.14	3
-3.140	4
+3.14	4
 10	10
 20	20
 100	100
-124.00	124
+124	124
 125.2	125
 200	200
-1234567890.1234567800	1234567890
+1234567890.12345678	1234567890
 PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -78,14 +78,14 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
-1234567890.1234567800	1234567890
+1234567890.12345678	1234567890
 200	200
 125.2	125
-124.00	124
+124	124
 100	100
 20	20
 10	10
-3.140	4
+3.14	4
 3.14	3
 3.14	3
 3.14	3
@@ -93,8 +93,8 @@ POSTHOOK: Input: default@decimal_3
 2	2
 1.122	1
 1.12	1
-1.000000000000000000	1
-1.0	1
+1	1
+1	1
 1	1
 0.333	0
 0.33	0
@@ -105,7 +105,7 @@ POSTHOOK: Input: default@decimal_3
 0.01	0
 0	0
 0	0
-0.000000000000000000	0
+0	0
 -0.3	0
 -0.33	0
 -0.333	0
@@ -114,7 +114,7 @@ POSTHOOK: Input: default@decimal_3
 -1.122	-11
 -1255.49	-1255
 -4400	4400
--1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890
 NULL	0
 PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value
 PREHOOK: type: QUERY
@@ -125,7 +125,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
 NULL	0
--1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890
 -4400	4400
 -1255.49	-1255
 -1.122	-11
@@ -134,7 +134,7 @@ NULL	0
 -0.333	0
 -0.33	0
 -0.3	0
-0.000000000000000000	0
+0	0
 0	0
 0	0
 0.01	0
@@ -145,8 +145,8 @@ NULL	0
 0.33	0
 0.333	0
 1	1
-1.0	1
-1.000000000000000000	1
+1	1
+1	1
 1.12	1
 1.122	1
 2	2
@@ -154,14 +154,14 @@ NULL	0
 3.14	3
 3.14	3
 3.14	3
-3.140	4
+3.14	4
 10	10
 20	20
 100	100
-124.00	124
+124	124
 125.2	125
 200	200
-1234567890.1234567800	1234567890
+1234567890.12345678	1234567890
 PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -171,7 +171,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
 NULL
--1234567890.1234567890
+-1234567890.123456789
 -4400
 -1255.49
 -1.122
@@ -179,7 +179,7 @@ NULL
 -0.333
 -0.33
 -0.3
-0.000000000000000000
+0
 0.01
 0.02
 0.1
@@ -195,10 +195,10 @@ NULL
 10
 20
 100
-124.00
+124
 125.2
 200
-1234567890.1234567800
+1234567890.12345678
 PREHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -208,7 +208,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
 NULL	0
--1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890
 -4400	4400
 -1255.49	-1255
 -1.122	-11
@@ -216,7 +216,7 @@ NULL	0
 -0.333	0
 -0.33	0
 -0.3	0
-0.000000000000000000	0
+0	0
 0.01	0
 0.02	0
 0.1	0
@@ -232,10 +232,10 @@ NULL	0
 10	10
 20	20
 100	100
-124.00	124
+124	124
 125.2	125
 200	200
-1234567890.1234567800	1234567890
+1234567890.12345678	1234567890
 PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -244,23 +244,23 @@ POSTHOOK: query: SELECT value, sum(key) 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
--1234567890	-1234567890.1234567890
+-1234567890	-1234567890.123456789
 -1255	-1255.49
 -11	-1.122
 -1	-2.24
-0	0.330000000000000000
-1	5.242000000000000000
+0	0.33
+1	5.242
 2	4
 3	9.42
-4	3.140
+4	3.14
 10	10
 20	20
 100	100
-124	124.00
+124	124
 125	125.2
 200	200
 4400	-4400
-1234567890	1234567890.1234567800
+1234567890	1234567890.12345678
 PREHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -269,7 +269,7 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_3
 #### A masked pattern was here ####
--1234567890.1234567890	-1234567890	-1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890	-1234567890.123456789	-1234567890
 -4400	4400	-4400	4400
 -1255.49	-1255	-1255.49	-1255
 -1.122	-11	-1.122	-11
@@ -280,7 +280,11 @@ POSTHOOK: Input: default@decimal_3
 -0.333	0	-0.333	0
 -0.33	0	-0.33	0
 -0.3	0	-0.3	0
-0.000000000000000000	0	0.000000000000000000	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
 0	0	0	0
 0	0	0	0
 0	0	0	0
@@ -293,8 +297,14 @@ POSTHOOK: Input: default@decimal_3
 0.33	0	0.33	0
 0.333	0	0.333	0
 1	1	1	1
-1.0	1	1.0	1
-1.000000000000000000	1	1.000000000000000000	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
 1.12	1	1.12	1
 1.122	1	1.122	1
 2	2	2	2
@@ -310,14 +320,20 @@ POSTHOOK: Input: default@decimal_3
 3.14	3	3.14	3
 3.14	3	3.14	3
 3.14	3	3.14	3
-3.140	4	3.140	4
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	4
 10	10	10	10
 20	20	20	20
 100	100	100	100
-124.00	124	124.00	124
+124	124	124	124
 125.2	125	125.2	125
 200	200	200	200
-1234567890.1234567800	1234567890	1234567890.1234567800	1234567890
+1234567890.12345678	1234567890	1234567890.12345678	1234567890
 PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.14 ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -329,7 +345,7 @@ POSTHOOK: Input: default@decimal_3
 3.14	3
 3.14	3
 3.14	3
-3.140	4
+3.14	4
 PREHOOK: query: SELECT * FROM DECIMAL_3 WHERE key=3.140 ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_3
@@ -341,7 +357,7 @@ POSTHOOK: Input: default@decimal_3
 3.14	3
 3.14	3
 3.14	3
-3.140	4
+3.14	4
 PREHOOK: query: DROP TABLE DECIMAL_3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_3

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_4.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_4.q.out Fri Nov  7 21:58:56 2014
@@ -57,7 +57,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_4_1
 #### A masked pattern was here ####
 NULL	0
--1234567890.1234567890	-1234567890
+-1234567890.123456789	-1234567890
 -4400	4400
 -1255.49	-1255
 -1.122	-11
@@ -66,7 +66,7 @@ NULL	0
 -0.333	0
 -0.33	0
 -0.3	0
-0.0000000000000000000000000	0
+0	0
 0	0
 0	0
 0.01	0
@@ -78,7 +78,7 @@ NULL	0
 0.333	0
 0.9999999999999999999999999	1
 1	1
-1.0	1
+1	1
 1.12	1
 1.122	1
 2	2
@@ -86,14 +86,14 @@ NULL	0
 3.14	3
 3.14	3
 3.14	3
-3.140	4
+3.14	4
 10	10
 20	20
 100	100
-124.00	124
+124	124
 125.2	125
 200	200
-1234567890.1234567800	1234567890
+1234567890.12345678	1234567890
 PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_4_2
@@ -103,7 +103,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_4_2
 #### A masked pattern was here ####
 NULL	NULL
--1234567890.1234567890	-3703703670.3703703670
+-1234567890.123456789	-3703703670.370370367
 -4400	-13200
 -1255.49	-3766.47
 -1.122	-3.366
@@ -112,7 +112,7 @@ NULL	NULL
 -0.333	-0.999
 -0.33	-0.99
 -0.3	-0.9
-0.0000000000000000000000000	0.0000000000000000000000000
+0	0
 0	0
 0	0
 0.01	0.03
@@ -124,7 +124,7 @@ NULL	NULL
 0.333	0.999
 0.9999999999999999999999999	2.9999999999999999999999997
 1	3
-1.0	3.0
+1	3
 1.12	3.36
 1.122	3.366
 2	6
@@ -132,14 +132,14 @@ NULL	NULL
 3.14	9.42
 3.14	9.42
 3.14	9.42
-3.140	9.420
+3.14	9.42
 10	30
 20	60
 100	300
-124.00	372.00
+124	372
 125.2	375.6
 200	600
-1234567890.1234567800	3703703670.3703703400
+1234567890.12345678	3703703670.37037034
 PREHOOK: query: DROP TABLE DECIMAL_4_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_4_1

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_5.q.out Fri Nov  7 21:58:56 2014
@@ -43,7 +43,7 @@ NULL
 -0.333
 -0.33
 -0.3
-0.00000
+0
 0
 0
 0.01
@@ -54,8 +54,8 @@ NULL
 0.33
 0.333
 1
-1.0
-1.00000
+1
+1
 1.12
 1.122
 2
@@ -63,11 +63,11 @@ NULL
 3.14
 3.14
 3.14
-3.140
+3.14
 10
 20
 100
-124.00
+124
 125.2
 200
 PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key
@@ -86,7 +86,7 @@ NULL
 -0.333
 -0.33
 -0.3
-0.00000
+0
 0.01
 0.02
 0.1
@@ -102,7 +102,7 @@ NULL
 10
 20
 100
-124.00
+124
 125.2
 200
 PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5
@@ -161,7 +161,7 @@ POSTHOOK: Input: default@decimal_5
 #### A masked pattern was here ####
 NULL
 NULL
-0.000
+0
 0
 100
 10
@@ -180,7 +180,7 @@ NULL
 -0.3
 -0.33
 -0.333
-1.0
+1
 2
 3.14
 -1.12
@@ -188,13 +188,13 @@ NULL
 -1.122
 1.12
 1.122
-124.00
+124
 125.2
 NULL
 3.14
 3.14
-3.140
-1.000
+3.14
+1
 NULL
 NULL
 PREHOOK: query: DROP TABLE DECIMAL_5

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_6.q.out Fri Nov  7 21:58:56 2014
@@ -91,16 +91,16 @@ NULL
 -0.333
 -0.3
 -0.3
-0.00000
-0.0000
+0
+0
 0
 0
 0.333
 0.333
-1.0
-1.0
-1.0000
-1.00000
+1
+1
+1
+1
 1.12
 1.12
 1.122
@@ -111,14 +111,14 @@ NULL
 3.14
 3.14
 3.14
-3.140
-3.140
+3.14
+3.14
 10
 10
 10.7343
 10.73433
-124.00
-124.00
+124
+124
 125.2
 125.2
 23232.23435

Added: hive/trunk/ql/src/test/results/clientpositive/decimal_join2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_join2.q.out?rev=1637469&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_join2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_join2.q.out Fri Nov  7 21:58:56 2014
@@ -0,0 +1,371 @@
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3_txt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS DECIMAL_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_3_txt
+POSTHOOK: query: CREATE TABLE DECIMAL_3_txt(key decimal(38,18), value int)
+ROW FORMAT DELIMITED
+   FIELDS TERMINATED BY ' '
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_3_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@decimal_3_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_3_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@decimal_3_txt
+PREHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@decimal_3_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@DECIMAL_3
+POSTHOOK: query: CREATE TABLE DECIMAL_3 STORED AS ORC AS SELECT * FROM DECIMAL_3_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@decimal_3_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@DECIMAL_3
+PREHOOK: query: EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: key (type: decimal(38,18))
+                sort order: +
+                Map-reduce partition columns: key (type: decimal(38,18))
+                Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                value expressions: value (type: int)
+          TableScan
+            alias: a
+            Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: key (type: decimal(38,18))
+                sort order: +
+                Map-reduce partition columns: key (type: decimal(38,18))
+                Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+                value expressions: value (type: int)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {KEY.reducesinkkey0} {VALUE._col0}
+            1 {KEY.reducesinkkey0} {VALUE._col0}
+          outputColumnNames: _col0, _col1, _col5, _col6
+          Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col5 (type: decimal(38,18)), _col6 (type: int)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col2 (type: decimal(38,18)), _col3 (type: int)
+              sort order: ++++
+              Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(38,18)), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: decimal(38,18)), KEY.reducesinkkey3 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+-1234567890.123456789	-1234567890	-1234567890.123456789	-1234567890
+-4400	4400	-4400	4400
+-1255.49	-1255	-1255.49	-1255
+-1.122	-11	-1.122	-11
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-0.333	0	-0.333	0
+-0.33	0	-0.33	0
+-0.3	0	-0.3	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0.01	0	0.01	0
+0.02	0	0.02	0
+0.1	0	0.1	0
+0.2	0	0.2	0
+0.3	0	0.3	0
+0.33	0	0.33	0
+0.333	0	0.333	0
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1.12	1	1.12	1
+1.122	1	1.122	1
+2	2	2	2
+2	2	2	2
+2	2	2	2
+2	2	2	2
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	4
+10	10	10	10
+20	20	20	20
+100	100	100	100
+124	124	124	124
+125.2	125	125.2	125
+200	200	200	200
+1234567890.12345678	1234567890	1234567890.12345678	1234567890
+PREHOOK: query: EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-5 is a root stage
+  Stage-2 depends on stages: Stage-5
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-5
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        a 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        a 
+          TableScan
+            alias: a
+            Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                condition expressions:
+                  0 {value}
+                  1 {key} {value}
+                keys:
+                  0 key (type: decimal(38,18))
+                  1 key (type: decimal(38,18))
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 19 Data size: 2148 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                condition expressions:
+                  0 {key} {value}
+                  1 {key} {value}
+                keys:
+                  0 key (type: decimal(38,18))
+                  1 key (type: decimal(38,18))
+                outputColumnNames: _col0, _col1, _col5, _col6
+                Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col5 (type: decimal(38,18)), _col6 (type: int)
+                  outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col2 (type: decimal(38,18)), _col3 (type: int)
+                    sort order: ++++
+                    Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+      Local Work:
+        Map Reduce Local Work
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: decimal(38,18)), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: decimal(38,18)), KEY.reducesinkkey3 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 20 Data size: 2362 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key) ORDER BY a.key, a.value, b.key, b.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_3
+#### A masked pattern was here ####
+-1234567890.123456789	-1234567890	-1234567890.123456789	-1234567890
+-4400	4400	-4400	4400
+-1255.49	-1255	-1255.49	-1255
+-1.122	-11	-1.122	-11
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-1.12	-1	-1.12	-1
+-0.333	0	-0.333	0
+-0.33	0	-0.33	0
+-0.3	0	-0.3	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0	0	0	0
+0.01	0	0.01	0
+0.02	0	0.02	0
+0.1	0	0.1	0
+0.2	0	0.2	0
+0.3	0	0.3	0
+0.33	0	0.33	0
+0.333	0	0.333	0
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1	1	1	1
+1.12	1	1.12	1
+1.122	1	1.122	1
+2	2	2	2
+2	2	2	2
+2	2	2	2
+2	2	2	2
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	3
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	3	3.14	4
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	3
+3.14	4	3.14	4
+10	10	10	10
+20	20	20	20
+100	100	100	100
+124	124	124	124
+125.2	125	125.2	125
+200	200	200	200
+1234567890.12345678	1234567890	1234567890.12345678	1234567890
+PREHOOK: query: DROP TABLE DECIMAL_3_txt
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_3_txt
+PREHOOK: Output: default@decimal_3_txt
+POSTHOOK: query: DROP TABLE DECIMAL_3_txt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_3_txt
+POSTHOOK: Output: default@decimal_3_txt
+PREHOOK: query: DROP TABLE DECIMAL_3
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@decimal_3
+PREHOOK: Output: default@decimal_3
+POSTHOOK: query: DROP TABLE DECIMAL_3
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@decimal_3
+POSTHOOK: Output: default@decimal_3
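
The new decimal_join2.q.out exercises the same self-join twice: the first EXPLAIN produces a reduce-side Join Operator keyed on decimal(38,18), the second a Map Reduce Local Work stage with a HashTable Sink feeding a Map Join Operator, and both runs are expected to return the identical row set listed above. The settings that flip between the two plans are not part of the output, so the following is only an assumed sketch of how such a pair of runs is typically driven (hive.auto.convert.join is the standard switch; the actual .q file may use different settings):

    -- Assumed setup, not taken from the patch:
    set hive.auto.convert.join=false;  -- keep the reduce-side (common) join
    SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key)
    ORDER BY a.key, a.value, b.key, b.value;

    set hive.auto.convert.join=true;   -- allow conversion to a map join
    SELECT * FROM DECIMAL_3 a JOIN DECIMAL_3 b ON (a.key = b.key)
    ORDER BY a.key, a.value, b.key, b.value;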

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_precision.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_precision.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_precision.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_precision.q.out Fri Nov  7 21:58:56 2014
@@ -76,13 +76,13 @@ NULL
 NULL
 NULL
 NULL
-0.0000000000
-0.0000000000
-0.0000000000
-0.0000000000
 0
-0.1234567890
-0.1234567890
+0
+0
+0
+0
+0.123456789
+0.123456789
 1.2345678901
 1.2345678901
 1.2345678901
@@ -106,7 +106,7 @@ NULL
 123456789.0123456
 123456789.0123456789
 1234567890.123456
-1234567890.1234567890
+1234567890.123456789
 PREHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_precision
@@ -159,13 +159,13 @@ NULL	NULL	NULL
 NULL	NULL	NULL
 NULL	NULL	NULL
 NULL	NULL	NULL
-0.0000000000	1.0000000000	-1.0000000000
-0.0000000000	1.0000000000	-1.0000000000
-0.0000000000	1.0000000000	-1.0000000000
-0.0000000000	1.0000000000	-1.0000000000
 0	1	-1
-0.1234567890	1.1234567890	-0.8765432110
-0.1234567890	1.1234567890	-0.8765432110
+0	1	-1
+0	1	-1
+0	1	-1
+0	1	-1
+0.123456789	1.123456789	-0.876543211
+0.123456789	1.123456789	-0.876543211
 1.2345678901	2.2345678901	0.2345678901
 1.2345678901	2.2345678901	0.2345678901
 1.2345678901	2.2345678901	0.2345678901
@@ -189,7 +189,7 @@ NULL	NULL	NULL
 123456789.0123456	123456790.0123456	123456788.0123456
 123456789.0123456789	123456790.0123456789	123456788.0123456789
 1234567890.123456	1234567891.123456	1234567889.123456
-1234567890.1234567890	1234567891.1234567890	1234567889.1234567890
+1234567890.123456789	1234567891.123456789	1234567889.123456789
 PREHOOK: query: SELECT dec, dec * 2, dec / 3  FROM DECIMAL_PRECISION ORDER BY dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_precision
@@ -242,13 +242,13 @@ NULL	NULL	NULL
 NULL	NULL	NULL
 NULL	NULL	NULL
 NULL	NULL	NULL
-0.0000000000	0.0000000000	0
-0.0000000000	0.0000000000	0
-0.0000000000	0.0000000000	0
-0.0000000000	0.0000000000	0
 0	0	0
-0.1234567890	0.2469135780	0.041152263
-0.1234567890	0.2469135780	0.041152263
+0	0	0
+0	0	0
+0	0	0
+0	0	0
+0.123456789	0.246913578	0.041152263
+0.123456789	0.246913578	0.041152263
 1.2345678901	2.4691357802	0.411522630033
 1.2345678901	2.4691357802	0.411522630033
 1.2345678901	2.4691357802	0.411522630033
@@ -258,9 +258,9 @@ NULL	NULL	NULL
 123.4567890123	246.9135780246	41.1522630041
 123.4567890123	246.9135780246	41.1522630041
 123.4567890123	246.9135780246	41.1522630041
-1234.5678901235	2469.1357802470	411.522630041167
-1234.5678901235	2469.1357802470	411.522630041167
-1234.5678901235	2469.1357802470	411.522630041167
+1234.5678901235	2469.135780247	411.522630041167
+1234.5678901235	2469.135780247	411.522630041167
+1234.5678901235	2469.135780247	411.522630041167
 12345.6789012346	24691.3578024692	4115.226300411533
 12345.6789012346	24691.3578024692	4115.226300411533
 123456.7890123456	246913.5780246912	41152.2630041152
@@ -272,7 +272,7 @@ NULL	NULL	NULL
 123456789.0123456	246913578.0246912	41152263.0041152
 123456789.0123456789	246913578.0246913578	41152263.0041152263
 1234567890.123456	2469135780.246912	411522630.041152
-1234567890.1234567890	2469135780.2469135780	411522630.041152263
+1234567890.123456789	2469135780.246913578	411522630.041152263
 PREHOOK: query: SELECT dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_precision
@@ -325,13 +325,13 @@ NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-0.0000000000	0
-0.0000000000	0
-0.0000000000	0
-0.0000000000	0
 0	0
-0.1234567890	0.013717421
-0.1234567890	0.013717421
+0	0
+0	0
+0	0
+0	0
+0.123456789	0.013717421
+0.123456789	0.013717421
 1.2345678901	0.137174210011
 1.2345678901	0.137174210011
 1.2345678901	0.137174210011
@@ -355,7 +355,7 @@ NULL	NULL
 123456789.0123456	13717421.001371733333
 123456789.0123456789	13717421.0013717421
 1234567890.123456	137174210.013717333333
-1234567890.1234567890	137174210.013717421
+1234567890.123456789	137174210.013717421
 PREHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_precision
@@ -408,13 +408,13 @@ NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-0.0000000000	0
-0.0000000000	0
-0.0000000000	0
-0.0000000000	0
 0	0
-0.1234567890	0.0045724736667
-0.1234567890	0.0045724736667
+0	0
+0	0
+0	0
+0	0
+0.123456789	0.0045724736667
+0.123456789	0.0045724736667
 1.2345678901	0.0457247366704
 1.2345678901	0.0457247366704
 1.2345678901	0.0457247366704
@@ -438,7 +438,7 @@ NULL	NULL
 123456789.0123456	4572473.6671239111111
 123456789.0123456789	4572473.6671239140333
 1234567890.123456	45724736.6712391111111
-1234567890.1234567890	45724736.6712391403333
+1234567890.123456789	45724736.6712391403333
 PREHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_precision
@@ -491,13 +491,13 @@ NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-0.0000000000	0.00000000000000000000
-0.0000000000	0.00000000000000000000
-0.0000000000	0.00000000000000000000
-0.0000000000	0.00000000000000000000
 0	0
-0.1234567890	0.01524157875019052100
-0.1234567890	0.01524157875019052100
+0	0
+0	0
+0	0
+0	0
+0.123456789	0.015241578750190521
+0.123456789	0.015241578750190521
 1.2345678901	1.52415787526596567801
 1.2345678901	1.52415787526596567801
 1.2345678901	1.52415787526596567801
@@ -521,7 +521,7 @@ NULL	NULL
 123456789.0123456	15241578753238817.26870921383936
 123456789.0123456789	15241578753238836.75019051998750190521
 1234567890.123456	NULL
-1234567890.1234567890	NULL
+1234567890.123456789	NULL
 PREHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
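
In the decimal_precision.q.out changes above, the same trimming applies to stored and computed values alike (0.0000000000 becomes 0, 0.1234567890 becomes 0.123456789, 2469.1357802470 becomes 2469.135780247), while the dec * dec column remains NULL for the two largest inputs: their exact squares would need more digits than decimal's maximum precision of 38, so Hive returns NULL rather than a rounded value. A minimal sketch, assuming dec is a decimal(20,10) column as the ten-digit scale above suggests:

    -- The square of 1234567890.123456789 has 19 integer digits plus a
    -- 20-digit scale (39 digits total), which cannot be represented in a
    -- decimal result capped at precision 38, hence NULL in the last column.
    SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec;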

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out?rev=1637469&r1=1637468&r2=1637469&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_trailing.q.out Fri Nov  7 21:58:56 2014
@@ -43,13 +43,13 @@ POSTHOOK: Input: default@decimal_trailin
 0	0	0
 1	0	0
 2	NULL	NULL
-3	1.0000	1.00000000
-4	10.0000	10.00000000
-5	100.0000	100.00000000
-6	1000.0000	1000.00000000
-7	10000.0000	10000.00000000
-8	100000.0000	100000.00000000
-9	NULL	1000000.00000000
+3	1	1
+4	10	10
+5	100	100
+6	1000	1000
+7	10000	10000
+8	100000	100000
+9	NULL	1000000
 10	NULL	NULL
 11	NULL	NULL
 12	NULL	NULL
@@ -58,18 +58,18 @@ POSTHOOK: Input: default@decimal_trailin
 15	NULL	NULL
 16	NULL	NULL
 17	NULL	NULL
-18	1.0000	1.00000000
-19	10.000	10.0000000
-20	100.00	100.000000
-21	1000.0	1000.00000
-22	100000	10000.0000
-23	0.0000	0.00000000
-24	0.000	0.0000000
-25	0.00	0.000000
-26	0.0	0.00000
-27	0	0.00000
-28	12313.2000	134134.31252500
-29	99999.9990	134134.31242553
+18	1	1
+19	10	10
+20	100	100
+21	1000	1000
+22	100000	10000
+23	0	0
+24	0	0
+25	0	0
+26	0	0
+27	0	0
+28	12313.2	134134.312525
+29	99999.999	134134.31242553
 PREHOOK: query: DROP TABLE DECIMAL_TRAILING
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_trailing