You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by mm...@apache.org on 2016/12/22 08:32:36 UTC

[07/10] hive git commit: HIVE-15335: Fast Decimal (Matt McCline, reviewed by Sergey Shelukhin, Prasanth Jayachandran, Owen O'Malley)

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index 5e119d7..0a035c6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -29,6 +29,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveDecimalV1;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
@@ -797,18 +798,6 @@ public class BinarySortableSerDe extends AbstractSerDe {
         return;
       }
       case DECIMAL: {
-        // decimals are encoded in three pieces:
-        // sign: 1, 2 or 3 for smaller, equal or larger than 0 respectively
-        // factor: Number that indicates the amount of digits you have to move
-        // the decimal point left or right until the resulting number is smaller
-        // than zero but has something other than 0 as the first digit.
-        // digits: which is a string of all the digits in the decimal. If the number
-        // is negative the binary string will be inverted to get the correct ordering.
-        // Example: 0.00123
-        // Sign is 3 (bigger than 0)
-        // Factor is -2 (move decimal point 2 positions right)
-        // Digits are: 123
-
         HiveDecimalObjectInspector boi = (HiveDecimalObjectInspector) poi;
         HiveDecimal dec = boi.getPrimitiveJavaObject(o);
         serializeHiveDecimal(buffer, dec, invert);
@@ -980,22 +969,22 @@ public class BinarySortableSerDe extends AbstractSerDe {
     serializeInt(buffer, nanos, invert);
   }
 
-  public static void serializeHiveDecimal(ByteStream.Output buffer, HiveDecimal dec, boolean invert) {
+  public static void serializeOldHiveDecimal(ByteStream.Output buffer, HiveDecimalV1 oldDec, boolean invert) {
     // get the sign of the big decimal
-    int sign = dec.compareTo(HiveDecimal.ZERO);
+    int sign = oldDec.compareTo(HiveDecimalV1.ZERO);
 
     // we'll encode the absolute value (sign is separate)
-    dec = dec.abs();
+    oldDec = oldDec.abs();
 
     // get the scale factor to turn big decimal into a decimal < 1
     // This relies on the BigDecimal precision value, which as of HIVE-10270
     // is now different from HiveDecimal.precision()
-    int factor = dec.bigDecimalValue().precision() - dec.bigDecimalValue().scale();
+    int factor = oldDec.bigDecimalValue().precision() - oldDec.bigDecimalValue().scale();
     factor = sign == 1 ? factor : -factor;
 
     // convert the absolute big decimal to string
-    dec.scaleByPowerOfTen(Math.abs(dec.scale()));
-    String digits = dec.unscaledValue().toString();
+    oldDec.scaleByPowerOfTen(Math.abs(oldDec.scale()));
+    String digits = oldDec.unscaledValue().toString();
 
     // finally write out the pieces (sign, scale, digits)
     writeByte(buffer, (byte) ( sign + 1), invert);
@@ -1007,6 +996,119 @@ public class BinarySortableSerDe extends AbstractSerDe {
         digits.length(), sign == -1 ? !invert : invert);
   }
 
+  // See comments for next method.
+  public static void serializeHiveDecimal(ByteStream.Output buffer, HiveDecimal dec, boolean invert) {
+
+    byte[] scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+    serializeHiveDecimal(buffer, dec, invert, scratchBuffer);
+  }
+
+  /**
+   * Decimals are encoded in three pieces:Decimals are encoded in three pieces:
+   *
+   * Sign:   1, 2 or 3 for smaller, equal or larger than 0 respectively
+   * Factor: Number that indicates the amount of digits you have to move
+   *         the decimal point left or right until the resulting number is smaller
+   *         than zero but has something other than 0 as the first digit.
+   * Digits: which is a string of all the digits in the decimal. If the number
+   *         is negative the binary string will be inverted to get the correct ordering.
+   *
+   * UNDONE: Is this example correct?
+   *   Example: 0.00123
+   *   Sign is 3 (bigger than 0)
+   *   Factor is -2 (move decimal point 2 positions right)
+   *   Digits are: 123
+   *
+   * @param buffer
+   * @param dec
+   * @param invert
+   * @param scratchBuffer
+   */
+  public static void serializeHiveDecimal(
+    ByteStream.Output buffer, HiveDecimal dec, boolean invert,
+    byte[] scratchBuffer) {
+
+    // Get the sign of the decimal.
+    int signum = dec.signum();
+
+    // Get the 10^N power to turn digits into the desired decimal with a possible
+    // fractional part.
+    // To be compatible with the OldHiveDecimal version, zero has factor 1.
+    int factor;
+    if (signum == 0) {
+      factor = 1;
+    } else {
+      factor = dec.rawPrecision() - dec.scale();
+    }
+
+    // To make comparisons work properly, the "factor" gets the decimal's sign, too.
+    factor = signum == 1 ? factor : -factor;
+
+    // Convert just the decimal digits (no dot, sign, etc) into bytes.
+    //
+    // This is much faster than converting the BigInteger value from unscaledValue() which is no
+    // longer part of the HiveDecimal representation anymore to string, then bytes.
+    int index = dec.toDigitsOnlyBytes(scratchBuffer);
+
+    /*
+     * Finally write out the pieces (sign, power, digits)
+     */
+    writeByte(buffer, (byte) ( signum + 1), invert);
+    writeByte(buffer, (byte) ((factor >> 24) ^ 0x80), invert);
+    writeByte(buffer, (byte) ( factor >> 16), invert);
+    writeByte(buffer, (byte) ( factor >> 8), invert);
+    writeByte(buffer, (byte)   factor, invert);
+
+    // The toDigitsOnlyBytes stores digits at the end of the scratch buffer.
+    serializeBytes(
+        buffer,
+        scratchBuffer, index, scratchBuffer.length - index,
+        signum == -1 ? !invert : invert);
+  }
+
+  // A HiveDecimalWritable version.
+  public static void serializeHiveDecimal(
+      ByteStream.Output buffer, HiveDecimalWritable decWritable, boolean invert,
+      byte[] scratchBuffer) {
+
+      // Get the sign of the decimal.
+      int signum = decWritable.signum();
+
+      // Get the 10^N power to turn digits into the desired decimal with a possible
+      // fractional part.
+      // To be compatible with the OldHiveDecimal version, zero has factor 1.
+      int factor;
+      if (signum == 0) {
+        factor = 1;
+      } else {
+        factor = decWritable.rawPrecision() - decWritable.scale();
+      }
+
+      // To make comparisons work properly, the "factor" gets the decimal's sign, too.
+      factor = signum == 1 ? factor : -factor;
+
+      // Convert just the decimal digits (no dot, sign, etc) into bytes.
+      //
+      // This is much faster than converting the BigInteger value from unscaledValue() which is no
+      // longer part of the HiveDecimal representation anymore to string, then bytes.
+      int index = decWritable.toDigitsOnlyBytes(scratchBuffer);
+
+      /*
+       * Finally write out the pieces (sign, power, digits)
+       */
+      writeByte(buffer, (byte) ( signum + 1), invert);
+      writeByte(buffer, (byte) ((factor >> 24) ^ 0x80), invert);
+      writeByte(buffer, (byte) ( factor >> 16), invert);
+      writeByte(buffer, (byte) ( factor >> 8), invert);
+      writeByte(buffer, (byte)   factor, invert);
+
+      // The toDigitsOnlyBytes stores digits at the end of the scratch buffer.
+      serializeBytes(
+          buffer,
+          scratchBuffer, index, scratchBuffer.length - index,
+          signum == -1 ? !invert : invert);
+  }
+
   @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
index a7785b2..41087dc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
@@ -21,10 +21,11 @@ package org.apache.hadoop.hive.serde2.binarysortable.fast;
 import java.io.IOException;
 import java.math.BigInteger;
 import java.util.Arrays;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.FastHiveDecimal;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.binarysortable.InputByteBuffer;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
@@ -391,6 +392,7 @@ public final class BinarySortableDeserializeRead extends DeserializeRead {
           length++;
         } while (true);
 
+        // CONSIDER: Allocate a larger initial size.
         if(tempDecimalBuffer == null || tempDecimalBuffer.length < length) {
           tempDecimalBuffer = new byte[length];
         }
@@ -403,29 +405,30 @@ public final class BinarySortableDeserializeRead extends DeserializeRead {
         // read the null byte again
         inputByteBuffer.read(positive ? invert : !invert);
 
-        String digits = new String(tempDecimalBuffer, 0, length, BinarySortableSerDe.decimalCharSet);
-        BigInteger bi = new BigInteger(digits);
-        HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor-length);
+        String digits = new String(tempDecimalBuffer, 0, length, StandardCharsets.UTF_8);
 
-        if (!positive) {
-          bd = bd.negate();
-        }
+        // Set the value of the writable from the decimal digits that were written with no dot.
+        int scale = length - factor;
+        currentHiveDecimalWritable.setFromDigitsOnlyBytesWithScale(
+            !positive, tempDecimalBuffer, 0, length, scale);
+        boolean decimalIsNull = !currentHiveDecimalWritable.isSet();
+        if (!decimalIsNull) {
 
-        // We have a decimal.  After we enforce precision and scale, will it become a NULL?
+          // We have a decimal.  After we enforce precision and scale, will it become a NULL?
 
-        currentHiveDecimalWritable.set(bd);
+          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
 
-        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
+          int enforcePrecision = decimalTypeInfo.getPrecision();
+          int enforceScale = decimalTypeInfo.getScale();
 
-        int precision = decimalTypeInfo.getPrecision();
-        int scale = decimalTypeInfo.getScale();
+          decimalIsNull =
+              !currentHiveDecimalWritable.mutateEnforcePrecisionScale(
+                  enforcePrecision, enforceScale);
 
-        HiveDecimal decimal = currentHiveDecimalWritable.getHiveDecimal(precision, scale);
-        if (decimal == null) {
+        }
+        if (decimalIsNull) {
           return false;
         }
-        // Put value back into writable.
-        currentHiveDecimalWritable.set(decimal);
       }
       return true;
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 62bcaa5..a9ea7c0 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hive.common.util.DateUtils;
 import org.slf4j.Logger;
@@ -61,6 +62,8 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
 
   private TimestampWritable tempTimestampWritable;
 
+  private byte[] decimalBytesScratch;
+
   public BinarySortableSerializeWrite(boolean[] columnSortOrderIsDesc,
           byte[] columnNullMarker, byte[] columnNotNullMarker) {
     this();
@@ -397,6 +400,9 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
 
   /*
    * DECIMAL.
+   *
+   * NOTE: The scale parameter is for text serialization (e.g. HiveDecimal.toFormatString) that
+   * creates trailing zeroes output decimals.
    */
   @Override
   public void writeHiveDecimal(HiveDecimal dec, int scale) throws IOException {
@@ -407,6 +413,24 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
     // This field is not a null.
     BinarySortableSerDe.writeByte(output, columnNotNullMarker[index], invert);
 
-    BinarySortableSerDe.serializeHiveDecimal(output, dec, invert);
+    if (decimalBytesScratch == null) {
+      decimalBytesScratch = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+    }
+    BinarySortableSerDe.serializeHiveDecimal(output, dec, invert, decimalBytesScratch);
+  }
+
+  @Override
+  public void writeHiveDecimal(HiveDecimalWritable decWritable, int scale) throws IOException {
+    ++index;
+
+    final boolean invert = columnSortOrderIsDesc[index];
+
+    // This field is not a null.
+    BinarySortableSerDe.writeByte(output, columnNotNullMarker[index], invert);
+
+    if (decimalBytesScratch == null) {
+      decimalBytesScratch = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+    }
+    BinarySortableSerDe.serializeHiveDecimal(output, decWritable, invert, decimalBytesScratch);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
index fb41420..17d2385 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
@@ -24,6 +24,7 @@ import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -147,6 +148,10 @@ public interface SerializeWrite {
 
   /*
    * DECIMAL.
+   *
+   * NOTE: The scale parameter is for text serialization (e.g. HiveDecimal.toFormatString) that
+   * creates trailing zeroes output decimals.
    */
   void writeHiveDecimal(HiveDecimal dec, int scale) throws IOException;
+  void writeHiveDecimal(HiveDecimalWritable decWritable, int scale) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
index 4e82e9b..4d2ff22 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -66,31 +67,21 @@ public class LazyHiveDecimal extends LazyPrimitive<LazyHiveDecimalObjectInspecto
    */
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    String byteData = null;
-    try {
-      byteData = Text.decode(bytes.getData(), start, length);
-    } catch (CharacterCodingException e) {
-      isNull = true;
-      LOG.debug("Data not in the HiveDecimal data type range so converted to null.", e);
-      return;
-    }
 
-    HiveDecimal dec = HiveDecimal.create(byteData);
-    dec = enforcePrecisionScale(dec);
-    if (dec != null) {
-      data.set(dec);
-      isNull = false;
+    // Set the HiveDecimalWritable from bytes without converting to String first for
+    // better performance.
+    data.setFromBytes(bytes.getData(), start, length);
+    if (!data.isSet()) {
+      isNull = true;
     } else {
+      isNull = !data.mutateEnforcePrecisionScale(precision, scale);
+    }
+    if (isNull) {
       LOG.debug("Data not in the HiveDecimal data type range so converted to null. Given data is :"
-          + byteData);
-      isNull = true;
+          + new String(bytes.getData(), start, length, StandardCharsets.UTF_8));
     }
   }
 
-  private HiveDecimal enforcePrecisionScale(HiveDecimal dec) {
-    return HiveDecimal.enforcePrecisionScale(dec, precision, scale);
-  }
-
   @Override
   public HiveDecimalWritable getWritableObject() {
     return data;
@@ -107,8 +98,47 @@ public class LazyHiveDecimal extends LazyPrimitive<LazyHiveDecimalObjectInspecto
     if (hiveDecimal == null) {
       outputStream.write(nullBytes);
     } else {
-      ByteBuffer b = Text.encode(hiveDecimal.toFormatString(scale));
-      outputStream.write(b.array(), 0, b.limit());
+      byte[] scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+      int index = hiveDecimal.toFormatBytes(scale, scratchBuffer);
+      outputStream.write(scratchBuffer, index, scratchBuffer.length - index);
+    }
+  }
+
+  /**
+   * Writes HiveDecimal object to output stream as string
+   * @param outputStream
+   * @param hiveDecimal
+   * @throws IOException
+   */
+  public static void writeUTF8(
+      OutputStream outputStream,
+      HiveDecimal hiveDecimal, int scale,
+      byte[] scratchBuffer)
+    throws IOException {
+    if (hiveDecimal == null) {
+      outputStream.write(nullBytes);
+    } else {
+      int index = hiveDecimal.toFormatBytes(scale, scratchBuffer);
+      outputStream.write(scratchBuffer, index, scratchBuffer.length - index);
+    }
+  }
+
+  /**
+   * Writes HiveDecimalWritable object to output stream as string
+   * @param outputStream
+   * @param hiveDecimal
+   * @throws IOException
+   */
+  public static void writeUTF8(
+      OutputStream outputStream,
+      HiveDecimalWritable hiveDecimalWritable, int scale,
+      byte[] scratchBuffer)
+    throws IOException {
+    if (hiveDecimalWritable == null || !hiveDecimalWritable.isSet()) {
+      outputStream.write(nullBytes);
+    } else {
+      int index = hiveDecimalWritable.toFormatBytes(scale, scratchBuffer);
+      outputStream.write(scratchBuffer, index, scratchBuffer.length - index);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
index daf2cfb..a597fd7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
@@ -27,7 +27,6 @@ import java.util.Arrays;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
@@ -556,20 +555,24 @@ public final class LazySimpleDeserializeRead extends DeserializeRead {
           if (!LazyUtils.isNumberMaybe(bytes, fieldStart, fieldLength)) {
             return false;
           }
-          String byteData = new String(bytes, fieldStart, fieldLength, StandardCharsets.UTF_8);
-          HiveDecimal decimal = HiveDecimal.create(byteData);
-          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
-          int precision = decimalTypeInfo.getPrecision();
-          int scale = decimalTypeInfo.getScale();
-          decimal = HiveDecimal.enforcePrecisionScale(decimal, precision, scale);
-          if (decimal == null) {
+          // Trim blanks because OldHiveDecimal did...
+          currentHiveDecimalWritable.setFromBytes(bytes, fieldStart, fieldLength, /* trimBlanks */ true);
+          boolean decimalIsNull = !currentHiveDecimalWritable.isSet();
+          if (!decimalIsNull) {
+            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
+
+            int precision = decimalTypeInfo.getPrecision();
+            int scale = decimalTypeInfo.getScale();
+
+            decimalIsNull = !currentHiveDecimalWritable.mutateEnforcePrecisionScale(precision, scale);
+          }
+          if (decimalIsNull) {
             if (LOG.isDebugEnabled()) {
               LOG.debug("Data not in the HiveDecimal data type range so converted to null. Given data is :"
-                + byteData);
+                + new String(bytes, fieldStart, fieldLength, StandardCharsets.UTF_8));
             }
             return false;
           }
-          currentHiveDecimalWritable.set(decimal);
         }
         return true;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index 280c2b0..1401ac3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -76,6 +77,7 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
   private HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable;
   private HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable;
   private HiveIntervalDayTime hiveIntervalDayTime;
+  private byte[] decimalScratchBuffer;
 
   public LazySimpleSerializeWrite(int fieldCount,
     byte separator, LazySerDeParameters lazyParams) {
@@ -475,14 +477,34 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
 
   /*
    * DECIMAL.
+   *
+   * NOTE: The scale parameter is for text serialization (e.g. HiveDecimal.toFormatString) that
+   * creates trailing zeroes output decimals.
    */
   @Override
-  public void writeHiveDecimal(HiveDecimal v, int scale) throws IOException {
+  public void writeHiveDecimal(HiveDecimal dec, int scale) throws IOException {
+    if (index > 0) {
+      output.write(separator);
+    }
+
+    if (decimalScratchBuffer == null) {
+      decimalScratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+    }
+    LazyHiveDecimal.writeUTF8(output, dec, scale, decimalScratchBuffer);
+
+    index++;
+  }
+
+  @Override
+  public void writeHiveDecimal(HiveDecimalWritable decWritable, int scale) throws IOException {
     if (index > 0) {
       output.write(separator);
     }
 
-    LazyHiveDecimal.writeUTF8(output, v, scale);
+    if (decimalScratchBuffer == null) {
+      decimalScratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
+    }
+    LazyHiveDecimal.writeUTF8(output, decWritable, scale, decimalScratchBuffer);
 
     index++;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
index 55ab3e6..fe57df0 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
@@ -43,8 +43,13 @@ implements HiveDecimalObjectInspector {
       return null;
     }
 
-    HiveDecimal dec = ((LazyHiveDecimal)o).getWritableObject().getHiveDecimal();
-    return HiveDecimalUtils.enforcePrecisionScale(dec, (DecimalTypeInfo) typeInfo);
+    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+    // We do not want to modify the writable provided by the object o since it is not a copy.
+    HiveDecimalWritable decWritable = ((LazyHiveDecimal)o).getWritableObject();
+    HiveDecimalWritable result = HiveDecimalWritable.enforcePrecisionScale(
+        decWritable, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
+
+    return (result != null && result.isSet() ? result.getHiveDecimal() : null);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
index f8469a7..b7bb67e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
@@ -44,9 +44,7 @@ public class LazyBinaryHiveDecimal extends
 
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    LazyBinarySerDe.setFromBytes(bytes.getData(), start, length, data);
-    HiveDecimal dec = data.getHiveDecimal(precision, scale);
-    data = dec == null ? null : new HiveDecimalWritable(dec);
+    LazyBinarySerDe.setFromBigIntegerBytesAndScale(bytes.getData(), start, length, data);
+    data.mutateEnforcePrecisionScale(precision, scale);
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 54bfd2d..99abb5d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -316,7 +317,7 @@ public class LazyBinarySerDe extends AbstractSerDe {
     LazyBinaryUtils.writeVInt(byteStream, date.getDays());
   }
 
-  public static void setFromBytes(byte[] bytes, int offset, int length,
+  public static void setFromBigIntegerBytesAndScale(byte[] bytes, int offset, int length,
                                   HiveDecimalWritable dec) {
     LazyBinaryUtils.VInt vInt = new LazyBinaryUtils.VInt();
     LazyBinaryUtils.readVInt(bytes, offset, vInt);
@@ -324,20 +325,69 @@ public class LazyBinarySerDe extends AbstractSerDe {
     offset += vInt.length;
     LazyBinaryUtils.readVInt(bytes, offset, vInt);
     offset += vInt.length;
-    byte[] internalStorage = dec.getInternalStorage();
-    if (internalStorage.length != vInt.value) {
-      internalStorage = new byte[vInt.value];
-    }
-    System.arraycopy(bytes, offset, internalStorage, 0, vInt.value);
-    dec.set(internalStorage, scale);
+    dec.setFromBigIntegerBytesAndScale(bytes, offset, vInt.value, scale);
   }
 
   public static void writeToByteStream(RandomAccessOutput byteStream,
-                                       HiveDecimalWritable dec) {
-    LazyBinaryUtils.writeVInt(byteStream, dec.getScale());
-    byte[] internalStorage = dec.getInternalStorage();
-    LazyBinaryUtils.writeVInt(byteStream, internalStorage.length);
-    byteStream.write(internalStorage, 0, internalStorage.length);
+                                       HiveDecimalWritable decWritable) {
+    LazyBinaryUtils.writeVInt(byteStream, decWritable.scale());
+
+    // NOTE: This writes into a scratch buffer within HiveDecimalWritable.
+    //
+    int byteLength = decWritable.bigIntegerBytesInternalScratch();
+
+    LazyBinaryUtils.writeVInt(byteStream, byteLength);
+    byteStream.write(decWritable.bigIntegerBytesInternalScratchBuffer(), 0, byteLength);
+  }
+
+  /**
+   *
+   * Allocate scratchLongs with HiveDecimal.SCRATCH_LONGS_LEN longs.
+   * And, allocate scratch buffer with HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES bytes.
+   *
+   * @param byteStream
+   * @param dec
+   * @param scratchLongs
+   * @param buffer
+   */
+  public static void writeToByteStream(
+      RandomAccessOutput byteStream,
+      HiveDecimal dec,
+      long[] scratchLongs, byte[] scratchBytes) {
+    LazyBinaryUtils.writeVInt(byteStream, dec.scale());
+
+    // Convert decimal into the scratch buffer without allocating a byte[] each time
+    // for better performance.
+    int byteLength = 
+        dec.bigIntegerBytes(
+            scratchLongs, scratchBytes);
+    if (byteLength == 0) {
+      throw new RuntimeException("Decimal to binary conversion failed");
+    }
+    LazyBinaryUtils.writeVInt(byteStream, byteLength);
+    byteStream.write(scratchBytes, 0, byteLength);
+  }
+
+  /**
+  *
+  * Allocate scratchLongs with HiveDecimal.SCRATCH_LONGS_LEN longs.
+  * And, allocate scratch buffer with HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES bytes.
+  *
+  * @param byteStream
+  * @param dec
+  * @param scratchLongs
+  * @param buffer
+  */
+  public static void writeToByteStream(
+      RandomAccessOutput byteStream,
+      HiveDecimalWritable decWritable,
+      long[] scratchLongs, byte[] scratchBytes) {
+    LazyBinaryUtils.writeVInt(byteStream, decWritable.scale());
+    int byteLength =
+        decWritable.bigIntegerBytes(
+            scratchLongs, scratchBytes);
+    LazyBinaryUtils.writeVInt(byteStream, byteLength);
+    byteStream.write(scratchBytes, 0, byteLength);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
index ee945d4..e94ae99 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
@@ -24,7 +24,6 @@ import java.util.Arrays;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
@@ -313,8 +312,8 @@ public final class LazyBinaryDeserializeRead extends DeserializeRead {
             throw new EOFException();
           }
           LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
-          int saveStart = offset;
           offset += tempVInt.length;
+          int readScale = tempVInt.value;
 
           // Parse the first byte of a vint/vlong to determine the number of bytes.
           if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
@@ -322,7 +321,7 @@ public final class LazyBinaryDeserializeRead extends DeserializeRead {
           }
           LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
           offset += tempVInt.length;
-
+          int saveStart = offset;
           offset += tempVInt.value;
           // Last item -- ok to be at end.
           if (offset > end) {
@@ -330,16 +329,23 @@ public final class LazyBinaryDeserializeRead extends DeserializeRead {
           }
           int length = offset - saveStart;
 
-          LazyBinarySerDe.setFromBytes(bytes, saveStart, length,
-              currentHiveDecimalWritable);
+          //   scale = 2, length = 6, value = -6065716379.11
+          //   \002\006\255\114\197\131\083\105
+          //           \255\114\197\131\083\105
 
-          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
+          currentHiveDecimalWritable.setFromBigIntegerBytesAndScale(
+              bytes, saveStart, length, readScale);
+          boolean decimalIsNull = !currentHiveDecimalWritable.isSet();
+          if (!decimalIsNull) {
 
-          int precision = decimalTypeInfo.getPrecision();
-          int scale = decimalTypeInfo.getScale();
+            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
 
-          HiveDecimal decimal = currentHiveDecimalWritable.getHiveDecimal(precision, scale);
-          if (decimal == null) {
+            int precision = decimalTypeInfo.getPrecision();
+            int scale = decimalTypeInfo.getScale();
+
+            decimalIsNull = !currentHiveDecimalWritable.mutateEnforcePrecisionScale(precision, scale);
+          }
+          if (decimalIsNull) {
 
             // Logically move past this field.
             fieldIndex++;
@@ -356,8 +362,6 @@ public final class LazyBinaryDeserializeRead extends DeserializeRead {
             }
             return false;
           }
-          // Put value back into writable.
-          currentHiveDecimalWritable.set(decimal);
         }
         break;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index 91ef12d..6bc4622 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -56,11 +56,12 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
   private long nullOffset;
 
   // For thread safety, we allocate private writable objects for our use only.
-  private HiveDecimalWritable hiveDecimalWritable;
   private TimestampWritable timestampWritable;
   private HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable;
   private HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable;
   private HiveIntervalDayTime hiveIntervalDayTime;
+  private long[] scratchLongs;
+  private byte[] scratchBuffer;
 
   public LazyBinarySerializeWrite(int fieldCount) {
     this();
@@ -675,9 +676,48 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
 
   /*
    * DECIMAL.
+   *
+   * NOTE: The scale parameter is for text serialization (e.g. HiveDecimal.toFormatString) that
+   * creates trailing zeroes output decimals.
    */
   @Override
-  public void writeHiveDecimal(HiveDecimal v, int scale) throws IOException {
+  public void writeHiveDecimal(HiveDecimal dec, int scale) throws IOException {
+
+    // Every 8 fields we write a NULL byte.
+    if ((fieldIndex % 8) == 0) {
+      if (fieldIndex > 0) {
+        // Write back previous 8 field's NULL byte.
+        output.writeByte(nullOffset, nullByte);
+        nullByte = 0;
+        nullOffset = output.getLength();
+      }
+      // Allocate next NULL byte.
+      output.reserve(1);
+    }
+
+    // Set bit in NULL byte when a field is NOT NULL.
+    nullByte |= 1 << (fieldIndex % 8);
+
+    if (scratchLongs == null) {
+      scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
+      scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES];
+    }
+    LazyBinarySerDe.writeToByteStream(
+        output,
+        dec,
+        scratchLongs,
+        scratchBuffer);
+
+    fieldIndex++;
+
+    if (fieldIndex == fieldCount) {
+      // Write back the final NULL byte before the last fields.
+      output.writeByte(nullOffset, nullByte);
+    }
+  }
+
+  @Override
+  public void writeHiveDecimal(HiveDecimalWritable decWritable, int scale) throws IOException {
 
     // Every 8 fields we write a NULL byte.
     if ((fieldIndex % 8) == 0) {
@@ -694,11 +734,15 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
     // Set bit in NULL byte when a field is NOT NULL.
     nullByte |= 1 << (fieldIndex % 8);
 
-    if (hiveDecimalWritable == null) {
-      hiveDecimalWritable = new HiveDecimalWritable();
+    if (scratchLongs == null) {
+      scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
+      scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES];
     }
-    hiveDecimalWritable.set(v);
-    LazyBinarySerDe.writeToByteStream(output, hiveDecimalWritable);
+    LazyBinarySerDe.writeToByteStream(
+        output,
+        decWritable,
+        scratchLongs,
+        scratchBuffer);
 
     fieldIndex++;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 1ac72c6..6945a67 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -698,6 +698,8 @@ public final class ObjectInspectorUtils {
             .getPrimitiveWritableObject(o);
         return intervalDayTime.hashCode();
       case DECIMAL:
+        // Since getBucketHashCode uses this, HiveDecimal return the old (much slower) but
+        // compatible hash code.
         return ((HiveDecimalObjectInspector) poi).getPrimitiveWritableObject(o).hashCode();
 
       default: {

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 26b19f5..9642a7e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -580,7 +580,24 @@ public final class PrimitiveObjectInspectorUtils {
    * NumberFormatException will be thrown if o is not a valid number.
    */
   public static byte getByte(Object o, PrimitiveObjectInspector oi) {
-    return (byte) getInt(o, oi);
+    byte result;
+    switch (oi.getPrimitiveCategory()) {
+    case DECIMAL:
+      {
+        HiveDecimal dec = ((HiveDecimalObjectInspector) oi)
+            .getPrimitiveJavaObject(o);
+        if (!dec.isByte()) {
+          throw new NumberFormatException();
+        }
+        result = dec.byteValue();
+      }
+      break;
+    default:
+      // For all other data types, use int conversion.  At some point, we should have all
+      // conversions make sure the value fits.
+      return (byte) getInt(o, oi);
+    }
+    return result;
   }
 
   /**
@@ -589,7 +606,24 @@ public final class PrimitiveObjectInspectorUtils {
    * NumberFormatException will be thrown if o is not a valid number.
    */
   public static short getShort(Object o, PrimitiveObjectInspector oi) {
-    return (short) getInt(o, oi);
+    short result;
+    switch (oi.getPrimitiveCategory()) {
+    case DECIMAL:
+      {
+        HiveDecimal dec = ((HiveDecimalObjectInspector) oi)
+            .getPrimitiveJavaObject(o);
+        if (!dec.isShort()) {
+          throw new NumberFormatException();
+        }
+        result = dec.shortValue();
+      }
+      break;
+    default:
+      // For all other data types, use int conversion.  At some point, we should have all
+      // conversions make sure the value fits.
+      return (short) getInt(o, oi);
+    }
+    return result;
   }
 
   /**
@@ -653,8 +687,14 @@ public final class PrimitiveObjectInspectorUtils {
           .getPrimitiveWritableObject(o).getSeconds());
       break;
     case DECIMAL:
-      result = ((HiveDecimalObjectInspector) oi)
-          .getPrimitiveJavaObject(o).intValue();  // TODO: lossy conversion!
+      {
+        HiveDecimal dec = ((HiveDecimalObjectInspector) oi)
+            .getPrimitiveJavaObject(o);
+        if (!dec.isInt()) {
+          throw new NumberFormatException();
+        }
+        result = dec.intValue();
+      }
       break;
     case DATE:  // unsupported conversion
     default: {
@@ -717,8 +757,14 @@ public final class PrimitiveObjectInspectorUtils {
           .getSeconds();
       break;
     case DECIMAL:
-      result = ((HiveDecimalObjectInspector) oi)
-          .getPrimitiveJavaObject(o).longValue();  // TODO: lossy conversion!
+      {
+        HiveDecimal dec = ((HiveDecimalObjectInspector) oi)
+            .getPrimitiveJavaObject(o);
+        if (!dec.isLong()) {
+          throw new NumberFormatException();
+        }
+        result = dec.longValue();
+      }
       break;
     case DATE:  // unsupported conversion
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
index b87d1f8..daa51c1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
@@ -43,17 +43,16 @@ implements ConstantObjectInspector {
 
   @Override
   public HiveDecimalWritable getWritableConstantValue() {
+
     // We need to enforce precision/scale here.
-    // A little inefficiency here as we need to create a HiveDecimal instance from the writable and
-    // recreate a HiveDecimalWritable instance on the HiveDecimal instance. However, we don't know
-    // the precision/scale of the original writable until we get a HiveDecimal instance from it.
-    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo)typeInfo;
-    HiveDecimal dec = value == null ? null :
-      value.getHiveDecimal(decTypeInfo.precision(), decTypeInfo.scale());
-    if (dec == null) {
+
+    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) typeInfo;
+    HiveDecimalWritable result = new HiveDecimalWritable(value);
+    result.mutateEnforcePrecisionScale(decTypeInfo.precision(), decTypeInfo.scale());
+    if (!result.isSet()) {
       return null;
     }
-    return new HiveDecimalWritable(dec);
+    return result;
   }
 
   @Override
@@ -61,7 +60,7 @@ implements ConstantObjectInspector {
     if (value == null) {
       return super.precision();
     }
-    return value.getHiveDecimal().precision();
+    return value.precision();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
index 5caaf6b..cee9c45 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
@@ -34,8 +34,9 @@ public class HiveDecimalUtils {
       return null;
     }
 
-    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), typeInfo);
-    return dec == null ? null : new HiveDecimalWritable(dec);
+    HiveDecimalWritable result = new HiveDecimalWritable(writable);
+    result.mutateEnforcePrecisionScale(typeInfo.precision(), typeInfo.scale());
+    return (result.isSet() ? result : null);
   }
 
   public static void validateParameter(int precision, int scale) {

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
index f08a075..301ee8b 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
@@ -269,27 +269,27 @@ public class SerdeRandomRowSource {
       {
         WritableHiveCharObjectInspector writableCharObjectInspector =
                 new WritableHiveCharObjectInspector( (CharTypeInfo) primitiveTypeInfo);
-        return writableCharObjectInspector.create(new HiveChar(StringUtils.EMPTY, -1));
+        return writableCharObjectInspector.create((HiveChar) object);
       }
     case VARCHAR:
       {
         WritableHiveVarcharObjectInspector writableVarcharObjectInspector =
                 new WritableHiveVarcharObjectInspector( (VarcharTypeInfo) primitiveTypeInfo);
-        return writableVarcharObjectInspector.create(new HiveVarchar(StringUtils.EMPTY, -1));
+        return writableVarcharObjectInspector.create((HiveVarchar) object);
       }
     case BINARY:
-      return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create(ArrayUtils.EMPTY_BYTE_ARRAY);
+      return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
     case TIMESTAMP:
-      return ((WritableTimestampObjectInspector) objectInspector).create(new Timestamp(0));
+      return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
     case INTERVAL_YEAR_MONTH:
-      return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create(new HiveIntervalYearMonth(0));
+      return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
     case INTERVAL_DAY_TIME:
-      return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create(new HiveIntervalDayTime(0, 0));
+      return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
     case DECIMAL:
       {
         WritableHiveDecimalObjectInspector writableDecimalObjectInspector =
                 new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
-        return writableDecimalObjectInspector.create(HiveDecimal.ZERO);
+        return writableDecimalObjectInspector.create((HiveDecimal) object);
       }
     default:
       throw new Error("Unknown primitive category " + primitiveCategory);
@@ -331,7 +331,10 @@ public class SerdeRandomRowSource {
     case INTERVAL_DAY_TIME:
       return getRandIntervalDayTime(r);
     case DECIMAL:
-      return getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
+      {
+        HiveDecimal dec = getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
+        return dec;
+      }
     default:
       throw new Error("Unknown primitive category " + primitiveCategory);
     }
@@ -382,14 +385,9 @@ public class SerdeRandomRowSource {
         sb.append(".");
         sb.append(RandomTypeUtil.getRandString(r, DECIMAL_CHARS, scale));
       }
+      HiveDecimal dec = HiveDecimal.create(sb.toString());
 
-      HiveDecimal bd = HiveDecimal.create(sb.toString());
-      if (bd.scale() > bd.precision()) {
-        // Sometimes weird decimals are produced?
-        continue;
-      }
-
-      return bd;
+      return dec;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
index 3ac339d..19b04bb 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
@@ -68,7 +68,7 @@ public class VerifyFast {
     isNull = !deserializeRead.readNextField();
     if (isNull) {
       if (writable != null) {
-        TestCase.fail("Field reports null but object is not null");
+        TestCase.fail("Field reports null but object is not null (class " + writable.getClass().getName() + ", " + writable.toString() + ")");
       }
       return;
     } else if (writable == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
index 45be2dd..93eafc1 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
@@ -24,6 +24,7 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.generic.GenericEnumSymbol;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveDecimalV1;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -121,7 +122,7 @@ public class TestAvroSerializer {
 
   @Test
   public void canSerializeDecimals() throws SerDeException, IOException {
-    ByteBuffer bb = ByteBuffer.wrap(HiveDecimal.create("3.1416").unscaledValue().toByteArray());
+    ByteBuffer bb = ByteBuffer.wrap(HiveDecimal.create("3.1416").bigIntegerBytes());
     singleFieldTest("dec1", bb.rewind(),
         "{\"type\":\"bytes\", \"logicalType\":\"decimal\", \"precision\":5, \"scale\":4}");
   }
@@ -229,7 +230,10 @@ public class TestAvroSerializer {
     HiveDecimal dec = HiveDecimal.create("3.1415926");
     r = serializeAndDeserialize(field, "union1", AvroSerdeUtils.getBufferFromDecimal(dec, 4));
     HiveDecimal dec1 = AvroSerdeUtils.getHiveDecimalFromByteBuffer((ByteBuffer) r.get("union1"), 4);
-    assertEquals(dec.setScale(4), dec1);
+
+    // For now, old class.
+    HiveDecimalV1 oldDec = HiveDecimalV1.create(dec.bigDecimalValue());
+    assertEquals(oldDec.setScale(4).toString(), dec1.toString());
   }
 
   private enum enum1 {BLUE, RED , GREEN};

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
index 321b574..18b2032 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
@@ -203,18 +203,10 @@ public class MyTestPrimitiveClass {
           sb.append(getRandString(r, DECIMAL_CHARS, scale));
         }
 
-        HiveDecimal bd = HiveDecimal.create(sb.toString());
-        extraTypeInfo.precision = bd.precision();
-        extraTypeInfo.scale = bd.scale();
-        if (extraTypeInfo.scale > extraTypeInfo.precision) {
-          // Sometimes weird decimals are produced?
-          continue;
-        }
-
-        // For now, punt.
-        extraTypeInfo.precision = HiveDecimal.SYSTEM_DEFAULT_PRECISION;
-        extraTypeInfo.scale = HiveDecimal.SYSTEM_DEFAULT_SCALE;
-        return bd;
+        HiveDecimal dec = HiveDecimal.create(sb.toString());
+        extraTypeInfo.precision = dec.precision();
+        extraTypeInfo.scale = dec.scale();
+        return dec;
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
index 1c84fe6..5f5b03a 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.serde2.binarysortable;
 
+import java.util.ArrayList;
 import java.io.EOFException;
 import java.util.Arrays;
 import java.util.List;
@@ -196,7 +197,11 @@ public class TestBinarySortableFast extends TestCase {
           }
         } else {
           if (!object.equals(expected)) {
-            fail("SerDe deserialized value does not match");
+            fail("SerDe deserialized value does not match (expected " +
+              expected.getClass().getName() + " " +
+              expected.toString() + ", actual " +
+              object.getClass().getName() + " " +
+              object.toString() + ")");
           }
         }
       }
@@ -233,11 +238,37 @@ public class TestBinarySortableFast extends TestCase {
           fail("Different byte array lengths: serDeOutput.length " + serDeOutput.length + ", serializeWriteExpected.length " + serializeWriteExpected.length +
                   " mismatchPos " + mismatchPos + " perFieldWriteLengths " + Arrays.toString(perFieldWriteLengthsArray[i]));
         }
+        List<Integer> differentPositions = new ArrayList<Integer>();
         for (int b = 0; b < serDeOutput.length; b++) {
           if (serDeOutput[b] != serializeWriteExpected[b]) {
-            fail("SerializeWrite and SerDe serialization does not match at position " + b);
+            differentPositions.add(b);
           }
         }
+        if (differentPositions.size() > 0) {
+          List<String> serializeWriteExpectedFields = new ArrayList<String>();
+          List<String> serDeFields = new ArrayList<String>();
+          int f = 0;
+          int lastBegin = 0;
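+          // Entries in perFieldWriteLengths are cumulative output lengths,
+          // so each one marks the byte offset where a field's bytes end.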
+          for (int b = 0; b < serDeOutput.length; b++) {
+            int writeLength = perFieldWriteLengthsArray[i][f];
+            if (b + 1 == writeLength) {
+              serializeWriteExpectedFields.add(
+                  displayBytes(serializeWriteExpected, lastBegin, writeLength - lastBegin));
+              serDeFields.add(
+                  displayBytes(serDeOutput, lastBegin, writeLength - lastBegin));
+              f++;
+              lastBegin = b + 1;
+            }
+          }
+          fail("SerializeWrite and SerDe serialization does not match at positions " + differentPositions.toString() +
+              "\n(SerializeWrite: " +
+                  serializeWriteExpectedFields.toString() +
+              "\nSerDe: " +
+                  serDeFields.toString() +
+              "\nperFieldWriteLengths " + Arrays.toString(perFieldWriteLengthsArray[i]) +
+              "\nprimitiveTypeInfos " + Arrays.toString(primitiveTypeInfos) +
+              "\nrow " + Arrays.toString(row));
+        }
       }
       serdeBytes[i] = bytesWritable;
     }
@@ -426,4 +457,12 @@ public class TestBinarySortableFast extends TestCase {
       throw e;
     }
   }
+
+  private static String displayBytes(byte[] bytes, int start, int length) {
+    StringBuilder sb = new StringBuilder();
+    for (int i = start; i < start + length; i++) {
+      sb.append(String.format("\\%03d", (int) (bytes[i] & 0xff)));
+    }
+    return sb.toString();
+  }
 }
\ No newline at end of file

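Rather than stopping at the first differing byte, the rewritten check gathers every mismatching position, slices both serializations at the per-field boundaries, and prints each field's bytes in the failure message. The new displayBytes helper renders each byte as a backslash followed by its zero-padded unsigned decimal value; a tiny standalone illustration (the sample bytes are invented for the example):

    public class DisplayBytesSketch {
      // Same formatting as the helper added above.
      private static String displayBytes(byte[] bytes, int start, int length) {
        StringBuilder sb = new StringBuilder();
        for (int i = start; i < start + length; i++) {
          sb.append(String.format("\\%03d", (int) (bytes[i] & 0xff)));
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        byte[] sample = { 1, (byte) 0x80, 127, (byte) 0xff };
        System.out.println(displayBytes(sample, 0, sample.length));
        // Prints: \001\128\127\255
      }
    }
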
http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
deleted file mode 100644
index 3b12514..0000000
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveDecimalWritable.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.serde2.io;
-
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-
-import org.apache.hadoop.hive.common.type.Decimal128;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hive.common.util.Decimal128FastBuffer;
-
-/**
- * Unit tests for testing the fast allocation-free conversion
- * between HiveDecimalWritable and Decimal128
- */
-public class TestHiveDecimalWritable {
-
-    @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-    @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-    @Before
-    public void setUp() throws Exception {
-    }
-
-    private void doTestFastStreamForHiveDecimal(String valueString) {
-      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
-      BigDecimal value = new BigDecimal(valueString);
-      Decimal128 dec = new Decimal128();
-      dec.update(value);
-
-      HiveDecimalWritable witness = new HiveDecimalWritable();
-      witness.set(HiveDecimal.create(value));
-
-      int bufferUsed = dec.fastSerializeForHiveDecimal(scratch);
-      HiveDecimalWritable hdw = new HiveDecimalWritable();
-      hdw.set(scratch.getBytes(bufferUsed), dec.getScale());
-
-      HiveDecimal hd = hdw.getHiveDecimal();
-
-      BigDecimal readValue = hd.bigDecimalValue();
-
-      assertEquals(value, readValue);
-
-      // Now test fastUpdate from the same serialized HiveDecimal
-      Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
-              witness.getInternalStorage(), (short) witness.getScale());
-
-      assertEquals(dec, decRead);
-
-      // Test fastUpdate from its own (not fully compacted) serialized output
-      Decimal128 decReadSelf = new Decimal128().fastUpdateFromInternalStorage(
-              hdw.getInternalStorage(), (short) hdw.getScale());
-      assertEquals(dec, decReadSelf);
-    }
-
-    @Test
-    @Concurrent(count=4)
-    @Repeating(repetition=100)
-    public void testFastStreamForHiveDecimal() {
-
-      doTestFastStreamForHiveDecimal("0");
-      doTestFastStreamForHiveDecimal("-0");
-      doTestFastStreamForHiveDecimal("1");
-      doTestFastStreamForHiveDecimal("-1");
-      doTestFastStreamForHiveDecimal("2");
-      doTestFastStreamForHiveDecimal("-2");
-      doTestFastStreamForHiveDecimal("127");
-      doTestFastStreamForHiveDecimal("-127");
-      doTestFastStreamForHiveDecimal("128");
-      doTestFastStreamForHiveDecimal("-128");
-      doTestFastStreamForHiveDecimal("255");
-      doTestFastStreamForHiveDecimal("-255");
-      doTestFastStreamForHiveDecimal("256");
-      doTestFastStreamForHiveDecimal("-256");
-      doTestFastStreamForHiveDecimal("65535");
-      doTestFastStreamForHiveDecimal("-65535");
-      doTestFastStreamForHiveDecimal("65536");
-      doTestFastStreamForHiveDecimal("-65536");
-
-      doTestFastStreamForHiveDecimal("10");
-      doTestFastStreamForHiveDecimal("1000");
-      doTestFastStreamForHiveDecimal("1000000");
-      doTestFastStreamForHiveDecimal("1000000000");
-      doTestFastStreamForHiveDecimal("1000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000000000000000");
-      doTestFastStreamForHiveDecimal("1000000000000000000000000000000");
-
-      doTestFastStreamForHiveDecimal("-10");
-      doTestFastStreamForHiveDecimal("-1000");
-      doTestFastStreamForHiveDecimal("-1000000");
-      doTestFastStreamForHiveDecimal("-1000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000000000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000000000000000000");
-      doTestFastStreamForHiveDecimal("-1000000000000000000000000000000");
-
-
-      doTestFastStreamForHiveDecimal("0.01");
-      doTestFastStreamForHiveDecimal("-0.01");
-      doTestFastStreamForHiveDecimal("0.02");
-      doTestFastStreamForHiveDecimal("-0.02");
-      doTestFastStreamForHiveDecimal("0.0127");
-      doTestFastStreamForHiveDecimal("-0.0127");
-      doTestFastStreamForHiveDecimal("0.0128");
-      doTestFastStreamForHiveDecimal("-0.0128");
-      doTestFastStreamForHiveDecimal("0.0255");
-      doTestFastStreamForHiveDecimal("-0.0255");
-      doTestFastStreamForHiveDecimal("0.0256");
-      doTestFastStreamForHiveDecimal("-0.0256");
-      doTestFastStreamForHiveDecimal("0.065535");
-      doTestFastStreamForHiveDecimal("-0.065535");
-      doTestFastStreamForHiveDecimal("0.065536");
-      doTestFastStreamForHiveDecimal("-0.065536");
-
-      doTestFastStreamForHiveDecimal("0.101");
-      doTestFastStreamForHiveDecimal("0.10001");
-      doTestFastStreamForHiveDecimal("0.10000001");
-      doTestFastStreamForHiveDecimal("0.10000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000000000000000001");
-      doTestFastStreamForHiveDecimal("0.10000000000000000000000000000001");
-
-      doTestFastStreamForHiveDecimal("-0.101");
-      doTestFastStreamForHiveDecimal("-0.10001");
-      doTestFastStreamForHiveDecimal("-0.10000001");
-      doTestFastStreamForHiveDecimal("-0.10000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000000000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000000000000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000000000000000000001");
-      doTestFastStreamForHiveDecimal("-0.10000000000000000000000000000001");
-
-      doTestFastStreamForHiveDecimal(Integer.toString(Integer.MAX_VALUE));
-      doTestFastStreamForHiveDecimal(Integer.toString(Integer.MIN_VALUE));
-      doTestFastStreamForHiveDecimal(Long.toString(Long.MAX_VALUE));
-      doTestFastStreamForHiveDecimal(Long.toString(Long.MIN_VALUE));
-      doTestFastStreamForHiveDecimal(Decimal128.MAX_VALUE.toFormalString());
-      doTestFastStreamForHiveDecimal(Decimal128.MIN_VALUE.toFormalString());
-
-      // Test known tricky serialization values
-      int[] values = new int[] {
-              0x80,
-              0x8000,
-              0x800000,
-              0x80000000,
-              0x81,
-              0x8001,
-              0x800001,
-              0x80000001,
-              0x7f,
-              0x7fff,
-              0x7fffff,
-              0x7fffffff,
-              0xff,
-              0xffff,
-              0xffffff,
-              0xffffffff};
-
-
-      for(int value: values) {
-          for (int i = 0; i < 4; ++i) {
-              int[] pos = new int[] {1, 0, 0, 0, 0};
-              int[] neg = new int[] {0xff, 0, 0, 0, 0};
-
-              pos[i+1] = neg[i+1] = value;
-
-              doTestDecimalWithBoundsCheck(new Decimal128().update32(pos, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update32(neg, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update64(pos, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update64(neg, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update96(pos, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update96(neg, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update128(pos, 0));
-              doTestDecimalWithBoundsCheck(new Decimal128().update128(neg, 0));
-          }
-      }
-    }
-
-    void doTestDecimalWithBoundsCheck(Decimal128 value) {
-       if ((value.compareTo(Decimal128.MAX_VALUE)) > 0 ||
-           (value.compareTo(Decimal128.MIN_VALUE)) < 0) {
-             // Ignore this one, out of bounds and HiveDecimal will NPE
-             return;
-       }
-       doTestFastStreamForHiveDecimal(value.toFormalString());
-    }
-
-    @Test
-    @Concurrent(count=4)
-    @Repeating(repetition=100)
-    public void testHive6594() {
-      Decimal128FastBuffer scratch = new Decimal128FastBuffer();
-      String[] vs = new String[] {
-          "-4033.445769230769",
-          "6984454.211097692"};
-
-      Decimal128 d = new Decimal128(0L, (short) 14);
-      for (String s:vs) {
-        Decimal128 p = new Decimal128(s, (short) 14);
-        d.addDestructive(p, (short) 14);
-      }
-
-      int bufferUsed = d.fastSerializeForHiveDecimal(scratch);
-      HiveDecimalWritable hdw = new HiveDecimalWritable();
-      hdw.set(scratch.getBytes(bufferUsed), d.getScale());
-
-      HiveDecimal hd = hdw.getHiveDecimal();
-
-      BigDecimal readValue = hd.bigDecimalValue();
-
-      assertEquals(d.toBigDecimal().stripTrailingZeros(),
-          readValue.stripTrailingZeros());
-    }
-}
-
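
For the record, the deleted test exercised the allocation-free bridge between Decimal128 and HiveDecimalWritable. A condensed sketch of that round trip, assembled only from calls that appear in the removed file; note it compiles only against the pre-patch classes, since this commit retires the Decimal128 fast path:

    import java.math.BigDecimal;

    import org.apache.hadoop.hive.common.type.Decimal128;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
    import org.apache.hive.common.util.Decimal128FastBuffer;

    public class Decimal128RoundTripSketch {
      public static void main(String[] args) {
        Decimal128FastBuffer scratch = new Decimal128FastBuffer();
        Decimal128 dec = new Decimal128();
        dec.update(new BigDecimal("3.1416"));

        // Serialize the Decimal128 into the shared scratch buffer, then
        // rehydrate it through HiveDecimalWritable.
        int bufferUsed = dec.fastSerializeForHiveDecimal(scratch);
        HiveDecimalWritable hdw = new HiveDecimalWritable();
        hdw.set(scratch.getBytes(bufferUsed), dec.getScale());

        HiveDecimal hd = hdw.getHiveDecimal();
        // 3.1416 (the test compared after stripTrailingZeros)
        System.out.println(hd.bigDecimalValue());
      }
    }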