You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by om...@apache.org on 2015/07/28 21:53:55 UTC

hive git commit: HIVE-11209. Clean up dependencies in HiveDecimalWritable. (omalley reviewed by prasanthj)

Repository: hive
Updated Branches:
  refs/heads/master c178a6e9d -> 29651cd37


HIVE-11209. Clean up dependencies in HiveDecimalWritable. (omalley reviewed by
prasanthj)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/29651cd3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/29651cd3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/29651cd3

Branch: refs/heads/master
Commit: 29651cd370120ace1d514b8aab4880936228d45e
Parents: c178a6e
Author: Owen O'Malley <om...@apache.org>
Authored: Tue Jul 28 12:53:00 2015 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Tue Jul 28 12:53:00 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/common/type/HiveDecimal.java    |  6 +++
 .../hive/ql/io/orc/TreeReaderFactory.java       | 12 ++---
 .../hive/ql/udf/generic/GenericUDFBridge.java   |  8 +--
 .../apache/hadoop/hive/serde2/SerDeUtils.java   | 14 +++++-
 .../hive/serde2/io/HiveDecimalWritable.java     | 51 ++++++--------------
 .../hive/serde2/lazy/LazyHiveDecimal.java       |  3 +-
 .../lazy/fast/LazySimpleDeserializeRead.java    | 16 ++----
 .../lazybinary/LazyBinaryHiveDecimal.java       |  2 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java | 26 +++++++++-
 .../fast/LazyBinaryDeserializeRead.java         |  6 ++-
 .../fast/LazyBinarySerializeWrite.java          |  5 +-
 .../hive/serde2/typeinfo/HiveDecimalUtils.java  | 35 +-------------
 12 files changed, 86 insertions(+), 98 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java b/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
index f14fc2d..7d7fb28 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
@@ -296,6 +296,12 @@ public class HiveDecimal implements Comparable<HiveDecimal> {
       return null;
     }
 
+    // Minor optimization, avoiding creating new objects.
+    if (dec.precision() - dec.scale() <= maxPrecision - maxScale &&
+        dec.scale() <= maxScale) {
+      return dec;
+    }
+
     BigDecimal bd = enforcePrecisionScale(dec.bd, maxPrecision, maxScale);
     if (bd == null) {
       return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
index 3ff6b14..9bfe268 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -1261,9 +1260,10 @@ public class TreeReaderFactory {
         } else {
           result = (HiveDecimalWritable) previous;
         }
-        result.set(HiveDecimal.create(SerializationUtils.readBigInteger(valueStream),
-            (int) scaleReader.next()));
-        return HiveDecimalUtils.enforcePrecisionScale(result, precision, scale);
+        result.set(HiveDecimal.create(SerializationUtils.readBigInteger
+                (valueStream), (int) scaleReader.next()));
+        return HiveDecimalWritable.enforcePrecisionScale(result, precision,
+            scale);
       }
       return null;
     }
@@ -1289,7 +1289,7 @@ public class TreeReaderFactory {
           BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
           short scaleInData = (short) scaleReader.next();
           HiveDecimal dec = HiveDecimal.create(bInt, scaleInData);
-          dec = HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+          dec = HiveDecimal.enforcePrecisionScale(dec, precision, scale);
           result.set(0, dec);
         }
       } else {
@@ -1301,7 +1301,7 @@ public class TreeReaderFactory {
             BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
             short scaleInData = (short) scratchScaleVector.vector[i];
             HiveDecimal dec = HiveDecimal.create(bInt, scaleInData);
-            dec = HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+            dec = HiveDecimal.enforcePrecisionScale(dec, precision, scale);
             result.set(i, dec);
           }
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
index e471285..6098ddd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
@@ -22,7 +22,6 @@ import java.io.Serializable;
 import java.lang.reflect.Method;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDF;
@@ -34,7 +33,6 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
@@ -185,8 +183,10 @@ public class GenericUDFBridge extends GenericUDF implements Serializable {
     // For non-generic UDF, type info isn't available. This poses a problem for Hive Decimal.
     // If the returned value is HiveDecimal, we assume maximum precision/scale.
     if (result != null && result instanceof HiveDecimalWritable) {
-      result = HiveDecimalUtils.enforcePrecisionScale((HiveDecimalWritable) result,
-          HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
+      result = HiveDecimalWritable.enforcePrecisionScale
+          ((HiveDecimalWritable) result,
+              HiveDecimal.SYSTEM_DEFAULT_PRECISION,
+              HiveDecimal.SYSTEM_DEFAULT_SCALE);
     }
 
     return result;

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index c65174e..192e814 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -27,6 +27,8 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -44,6 +46,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspe
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -201,7 +205,6 @@ public final class SerDeUtils {
 
 
   static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi, String nullStr) {
-
     switch (oi.getCategory()) {
     case PRIMITIVE: {
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
@@ -284,6 +287,15 @@ public final class SerDeUtils {
           sb.append(((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o));
           break;
         }
+        case INTERVAL_YEAR_MONTH: {
+          sb.append(((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveJavaObject(o));
+          break;
+        }
+        case INTERVAL_DAY_TIME: {
+          sb.append(((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveJavaObject(o));
+          break;
+        }
+
         default:
           throw new RuntimeException("Unknown primitive type: "
               + poi.getPrimitiveCategory());

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
index 885828a..0578d24 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
@@ -24,13 +24,8 @@ import java.math.BigInteger;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
+
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableUtils;
 
@@ -41,8 +36,6 @@ public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritab
   private byte[] internalStorage = new byte[0];
   private int scale;
 
-  private final VInt vInt = new VInt(); // reusable integer
-
   public HiveDecimalWritable() {
   }
 
@@ -83,18 +76,6 @@ public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritab
     this.scale = scale;
   }
 
-  public void setFromBytes(byte[] bytes, int offset, int length) {
-    LazyBinaryUtils.readVInt(bytes, offset, vInt);
-    scale = vInt.value;
-    offset += vInt.length;
-    LazyBinaryUtils.readVInt(bytes, offset, vInt);
-    offset += vInt.length;
-    if (internalStorage.length != vInt.value) {
-      internalStorage = new byte[vInt.value];
-    }
-    System.arraycopy(bytes, offset, internalStorage, 0, vInt.value);
-  }
-
   public HiveDecimal getHiveDecimal() {
     return HiveDecimal.create(new BigInteger(internalStorage), scale);
   }
@@ -107,7 +88,8 @@ public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritab
    * @return HiveDecimal instance
    */
   public HiveDecimal getHiveDecimal(int maxPrecision, int maxScale) {
-     return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(new BigInteger(internalStorage), scale),
+     return HiveDecimal.enforcePrecisionScale(HiveDecimal.
+             create(new BigInteger(internalStorage), scale),
          maxPrecision, maxScale);
   }
 
@@ -133,20 +115,6 @@ public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritab
     return getHiveDecimal().compareTo(that.getHiveDecimal());
   }
 
-  public static void writeToByteStream(Decimal128 dec, Output byteStream) {
-    HiveDecimal hd = HiveDecimal.create(dec.toBigDecimal());
-    LazyBinaryUtils.writeVInt(byteStream, hd.scale());
-    byte[] bytes = hd.unscaledValue().toByteArray();
-    LazyBinaryUtils.writeVInt(byteStream, bytes.length);
-    byteStream.write(bytes, 0, bytes.length);
-  }
-
-  public void writeToByteStream(RandomAccessOutput byteStream) {
-    LazyBinaryUtils.writeVInt(byteStream, scale);
-    LazyBinaryUtils.writeVInt(byteStream, internalStorage.length);
-    byteStream.write(internalStorage, 0, internalStorage.length);
-  }
-
   @Override
   public String toString() {
     return getHiveDecimal().toString();
@@ -190,4 +158,17 @@ public class HiveDecimalWritable implements WritableComparable<HiveDecimalWritab
   public int getScale() {
     return scale;
   }
+
+  public static
+  HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
+                                            int precision, int scale) {
+    if (writable == null) {
+      return null;
+    }
+
+    HiveDecimal dec =
+        HiveDecimal.enforcePrecisionScale(writable.getHiveDecimal(), precision,
+            scale);
+    return dec == null ? null : new HiveDecimalWritable(dec);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
index fcf1ac6..b8b1f59 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.io.Text;
 
 public class LazyHiveDecimal extends LazyPrimitive<LazyHiveDecimalObjectInspector, HiveDecimalWritable> {
@@ -89,7 +88,7 @@ public class LazyHiveDecimal extends LazyPrimitive<LazyHiveDecimalObjectInspecto
   }
 
   private HiveDecimal enforcePrecisionScale(HiveDecimal dec) {
-    return HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+    return HiveDecimal.enforcePrecisionScale(dec, precision, scale);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
index 8c5b0b3..7588106 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
@@ -30,8 +30,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
-import org.apache.hadoop.hive.serde2.fast.DeserializeRead.ReadIntervalDayTimeResults;
-import org.apache.hadoop.hive.serde2.fast.DeserializeRead.ReadIntervalYearMonthResults;
+
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -44,17 +43,11 @@ import org.apache.hadoop.hive.serde2.lazy.LazyBinary;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
-import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
@@ -494,7 +487,8 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
         saveDecimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfos[fieldIndex];
         int precision = saveDecimalTypeInfo.getPrecision();
         int scale = saveDecimalTypeInfo.getScale();
-        saveDecimal = HiveDecimalUtils.enforcePrecisionScale(saveDecimal, precision, scale);
+        saveDecimal = HiveDecimal.enforcePrecisionScale(saveDecimal, precision,
+            scale);
         if (saveDecimal == null) {
           LOG.debug("Data not in the HiveDecimal data type range so converted to null. Given data is :"
               + byteData);
@@ -1059,4 +1053,4 @@ public class LazySimpleDeserializeRead implements DeserializeRead {
     return 0;
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
index e56e2ca..f8469a7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveDecimal.java
@@ -44,7 +44,7 @@ public class LazyBinaryHiveDecimal extends
 
   @Override
   public void init(ByteArrayRef bytes, int start, int length) {
-    data.setFromBytes(bytes.getData(), start, length);
+    LazyBinarySerDe.setFromBytes(bytes.getData(), start, length, data);
     HiveDecimal dec = data.getHiveDecimal(precision, scale);
     data = dec == null ? null : new HiveDecimalWritable(dec);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index a5dc5d8..41fe98a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -316,6 +316,30 @@ public class LazyBinarySerDe extends AbstractSerDe {
     LazyBinaryUtils.writeVInt(byteStream, date.getDays());
   }
 
+  public static void setFromBytes(byte[] bytes, int offset, int length,
+                                  HiveDecimalWritable dec) {
+    LazyBinaryUtils.VInt vInt = new LazyBinaryUtils.VInt();
+    LazyBinaryUtils.readVInt(bytes, offset, vInt);
+    int scale = vInt.value;
+    offset += vInt.length;
+    LazyBinaryUtils.readVInt(bytes, offset, vInt);
+    offset += vInt.length;
+    byte[] internalStorage = dec.getInternalStorage();
+    if (internalStorage.length != vInt.value) {
+      internalStorage = new byte[vInt.value];
+    }
+    System.arraycopy(bytes, offset, internalStorage, 0, vInt.value);
+    dec.set(internalStorage, scale);
+  }
+
+  public static void writeToByteStream(RandomAccessOutput byteStream,
+                                       HiveDecimalWritable dec) {
+    LazyBinaryUtils.writeVInt(byteStream, dec.getScale());
+    byte[] internalStorage = dec.getInternalStorage();
+    LazyBinaryUtils.writeVInt(byteStream, internalStorage.length);
+    byteStream.write(internalStorage, 0, internalStorage.length);
+  }
+
   /**
    * A recursive function that serialize an object to a byte buffer based on its
    * object inspector.
@@ -457,7 +481,7 @@ public class LazyBinarySerDe extends AbstractSerDe {
         if (t == null) {
           return;
         }
-        t.writeToByteStream(byteStream);
+        writeToByteStream(byteStream, t);
         return;
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
index a18e8b8..1f3806e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong;
@@ -913,7 +914,8 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     if (tempHiveDecimalWritable == null) {
       tempHiveDecimalWritable = new HiveDecimalWritable();
     }
-    tempHiveDecimalWritable.setFromBytes(bytes, saveStart, length);
+    LazyBinarySerDe.setFromBytes(bytes, saveStart, length,
+        tempHiveDecimalWritable);
 
     saveDecimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfos[fieldIndex];
 
@@ -939,4 +941,4 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     // Now return whether it is NULL or NOT NULL.
     return (saveDecimal == null);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index e0d9c0a..253b514 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hive.common.util.DateUtils;
@@ -733,7 +734,7 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
       hiveDecimalWritable = new HiveDecimalWritable();
     }
     hiveDecimalWritable.set(v);
-    hiveDecimalWritable.writeToByteStream(output);
+    LazyBinarySerDe.writeToByteStream(output, hiveDecimalWritable);
 
     fieldIndex++;
 
@@ -742,4 +743,4 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
       output.writeByte(nullOffset, nullByte);
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/29651cd3/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
index aa9e37a..cdd20bb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
@@ -18,35 +18,14 @@
 
 package org.apache.hadoop.hive.serde2.typeinfo;
 
-import java.math.BigDecimal;
-
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 
 public class HiveDecimalUtils {
 
   public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, DecimalTypeInfo typeInfo) {
-    return enforcePrecisionScale(dec, typeInfo.precision(), typeInfo.scale());
-  }
-
-  public static HiveDecimal enforcePrecisionScale(HiveDecimal dec,int maxPrecision, int maxScale) {
-    if (dec == null) {
-      return null;
-    }
-
-    // Minor optimization, avoiding creating new objects.
-    if (dec.precision() - dec.scale() <= maxPrecision - maxScale && dec.scale() <= maxScale) {
-      return dec;
-    }
-
-    BigDecimal bd = HiveDecimal.enforcePrecisionScale(dec.bigDecimalValue(),
-        maxPrecision, maxScale);
-    if (bd == null) {
-      return null;
-    }
-
-    return HiveDecimal.create(bd);
+    return HiveDecimal.enforcePrecisionScale(dec, typeInfo.precision(),
+        typeInfo.scale());
   }
 
   public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
@@ -59,16 +38,6 @@ public class HiveDecimalUtils {
     return dec == null ? null : new HiveDecimalWritable(dec);
   }
 
-  public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
-      int precision, int scale) {
-    if (writable == null) {
-      return null;
-    }
-
-    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), precision, scale);
-    return dec == null ? null : new HiveDecimalWritable(dec);
-  }
-
   public static void validateParameter(int precision, int scale) {
     if (precision < 1 || precision > HiveDecimal.MAX_PRECISION) {
       throw new IllegalArgumentException("Decimal precision out of allowed range [1," +