Posted to commits@drill.apache.org by ve...@apache.org on 2014/11/03 19:58:19 UTC

[2/3] git commit: DRILL-1528: Update HiveRecordReader to read Hive Decimal types with scale and precision.

DRILL-1528: Update HiveRecordReader to read Hive Decimal types with scale and precision.
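
For context: the patch maps each Hive DECIMAL(precision, scale) column to one of
Drill's four fixed-width decimal types based on precision alone. A minimal sketch
of that selection, assuming Drill's standard precision buckets (the authoritative
logic is DecimalUtility.getDecimalDataType and HiveFieldConverter.create in the
diff below; the method name drillDecimalTypeFor is illustrative only):

    // Sketch only: which Drill decimal type a given Hive precision selects.
    static String drillDecimalTypeFor(int precision) {
      if (precision <= 9) {
        return "DECIMAL9";          // digits fit in a 4-byte int
      } else if (precision <= 18) {
        return "DECIMAL18";         // digits fit in an 8-byte long
      } else if (precision <= 28) {
        return "DECIMAL28SPARSE";   // multi-word sparse representation
      } else {
        return "DECIMAL38SPARSE";   // up to 38 digits, Hive's maximum
      }
    }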


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/bf31a9a2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/bf31a9a2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/bf31a9a2

Branch: refs/heads/master
Commit: bf31a9a26d007af837c8fcbe926436f15a553ba0
Parents: a36ce65
Author: vkorukanti <ve...@gmail.com>
Authored: Fri Oct 10 17:15:09 2014 -0700
Committer: vkorukanti <ve...@gmail.com>
Committed: Mon Nov 3 10:31:09 2014 -0800

----------------------------------------------------------------------
 .../exec/store/hive/HiveFieldConverter.java     | 109 +++++++++++++--
 .../drill/exec/store/hive/HiveRecordReader.java | 131 +++++++++++++++----
 .../drill/exec/TestHiveProjectPushDown.java     |   8 +-
 .../exec/store/hive/HiveTestDataGenerator.java  |  30 ++++-
 4 files changed, 231 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/bf31a9a2/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
index e07d11e..82e038c 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
@@ -19,9 +19,19 @@ package org.apache.drill.exec.store.hive;
 
 import java.util.Map;
 
+import org.apache.drill.exec.expr.holders.Decimal18Holder;
+import org.apache.drill.exec.expr.holders.Decimal28SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal38SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal9Holder;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.util.DecimalUtility;
 import org.apache.drill.exec.vector.NullableBigIntVector;
 import org.apache.drill.exec.vector.NullableBitVector;
 import org.apache.drill.exec.vector.NullableDateVector;
+import org.apache.drill.exec.vector.NullableDecimal18Vector;
+import org.apache.drill.exec.vector.NullableDecimal28SparseVector;
+import org.apache.drill.exec.vector.NullableDecimal38SparseVector;
+import org.apache.drill.exec.vector.NullableDecimal9Vector;
 import org.apache.drill.exec.vector.NullableFloat4Vector;
 import org.apache.drill.exec.vector.NullableFloat8Vector;
 import org.apache.drill.exec.vector.NullableIntVector;
@@ -31,7 +41,6 @@ import org.apache.drill.exec.vector.NullableTinyIntVector;
 import org.apache.drill.exec.vector.NullableVarBinaryVector;
 import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.ValueVector;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
@@ -47,6 +56,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.io.Text;
@@ -65,7 +75,6 @@ public abstract class HiveFieldConverter {
     primMap.put(PrimitiveCategory.BINARY, Binary.class);
     primMap.put(PrimitiveCategory.BOOLEAN, Boolean.class);
     primMap.put(PrimitiveCategory.BYTE, Byte.class);
-    primMap.put(PrimitiveCategory.DECIMAL, Decimal.class);
     primMap.put(PrimitiveCategory.DOUBLE, Double.class);
     primMap.put(PrimitiveCategory.FLOAT, Float.class);
     primMap.put(PrimitiveCategory.INT, Int.class);
@@ -78,13 +87,30 @@ public abstract class HiveFieldConverter {
   }
 
 
-  public static HiveFieldConverter create(TypeInfo typeInfo) throws IllegalAccessException, InstantiationException {
+  public static HiveFieldConverter create(TypeInfo typeInfo, FragmentContext fragmentContext)
+      throws IllegalAccessException, InstantiationException {
     switch (typeInfo.getCategory()) {
       case PRIMITIVE:
         final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
-        Class< ? extends HiveFieldConverter> clazz = primMap.get(pCat);
-        if (clazz != null) {
-          return clazz.newInstance();
+        if (pCat != PrimitiveCategory.DECIMAL) {
+          Class<? extends HiveFieldConverter> clazz = primMap.get(pCat);
+          if (clazz != null) {
+            return clazz.newInstance();
+          }
+        } else {
+          // For decimal, based on precision return appropriate converter.
+          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+          int precision = decimalTypeInfo.precision();
+          int scale = decimalTypeInfo.scale();
+          if (precision <= 9) {
+            return new Decimal9(precision, scale);
+          } else if (precision <= 18) {
+            return new Decimal18(precision, scale);
+          } else if (precision <= 28) {
+            return new Decimal28(precision, scale, fragmentContext);
+          } else {
+            return new Decimal38(precision, scale, fragmentContext);
+          }
         }
 
         HiveRecordReader.throwUnsupportedHiveDataTypeError(pCat.toString());
@@ -125,12 +151,75 @@ public abstract class HiveFieldConverter {
     }
   }
 
-  public static class Decimal extends HiveFieldConverter {
+  public static class Decimal9 extends HiveFieldConverter {
+    private final Decimal9Holder holder = new Decimal9Holder();
+
+    public Decimal9(int precision, int scale) {
+      holder.scale = scale;
+      holder.precision = precision;
+    }
+
+    @Override
+    public boolean setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
+      holder.value = DecimalUtility.getDecimal9FromBigDecimal(
+          ((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue).bigDecimalValue(),
+          holder.scale, holder.precision);
+      return ((NullableDecimal9Vector) outputVV).getMutator().setSafe(outputIndex, holder);
+    }
+  }
+
+  public static class Decimal18 extends HiveFieldConverter {
+    private final Decimal18Holder holder = new Decimal18Holder();
+
+    public Decimal18(int precision, int scale) {
+      holder.scale = scale;
+      holder.precision = precision;
+    }
+
+    @Override
+    public boolean setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
+      holder.value = DecimalUtility.getDecimal18FromBigDecimal(
+          ((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue).bigDecimalValue(),
+          holder.scale, holder.precision);
+      return ((NullableDecimal18Vector) outputVV).getMutator().setSafe(outputIndex, holder);
+    }
+  }
+
+  public static class Decimal28 extends HiveFieldConverter {
+    private final Decimal28SparseHolder holder = new Decimal28SparseHolder();
+
+    public Decimal28(int precision, int scale, FragmentContext context) {
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.buffer = context.getManagedBuffer(Decimal28SparseHolder.nDecimalDigits * DecimalUtility.integerSize);
+      holder.start = 0;
+    }
+
+    @Override
+    public boolean setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
+      DecimalUtility.getSparseFromBigDecimal(
+          ((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue).bigDecimalValue(),
+          holder.buffer, holder.start, holder.scale, holder.precision, Decimal28SparseHolder.nDecimalDigits);
+      return ((NullableDecimal28SparseVector) outputVV).getMutator().setSafe(outputIndex, holder);
+    }
+  }
+
+  public static class Decimal38 extends HiveFieldConverter {
+    private final Decimal38SparseHolder holder = new Decimal38SparseHolder();
+
+    public Decimal38(int precision, int scale, FragmentContext context) {
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.buffer = context.getManagedBuffer(Decimal38SparseHolder.nDecimalDigits * DecimalUtility.integerSize);
+      holder.start = 0;
+    }
+
     @Override
     public boolean setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
-      final HiveDecimal value = ((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue);
-      final byte[] strBytes = value.toString().getBytes();
-      return ((NullableVarCharVector) outputVV).getMutator().setSafe(outputIndex, strBytes, 0, strBytes.length);
+      DecimalUtility.getSparseFromBigDecimal(
+          ((HiveDecimalObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue).bigDecimalValue(),
+          holder.buffer, holder.start, holder.scale, holder.precision, Decimal38SparseHolder.nDecimalDigits);
+      return ((NullableDecimal38SparseVector) outputVV).getMutator().setSafe(outputIndex, holder);
     }
   }
 

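A note on the new factory signature: create() now takes a FragmentContext because
the 28- and 38-digit converters keep their digits in a sparse multi-word buffer
that must come from the fragment's managed allocator. A hedged usage sketch, not
part of the commit (TypeInfoFactory is Hive's standard type factory; the method
name converterForDecimal is hypothetical):

    import org.apache.drill.exec.ops.FragmentContext;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    static HiveFieldConverter converterForDecimal(FragmentContext fragmentContext)
        throws IllegalAccessException, InstantiationException {
      // DECIMAL(23,1): precision 23 falls in the (18, 28] bucket...
      TypeInfo decimalType = TypeInfoFactory.getDecimalTypeInfo(23, 1);
      // ...so this returns a Decimal28 converter whose holder buffer is
      // allocated as nDecimalDigits * integerSize bytes of managed memory.
      return HiveFieldConverter.create(decimalType, fragmentContext);
    }
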
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/bf31a9a2/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index 24b6924..f30aa1b 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.store.hive;
 
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.List;
@@ -28,19 +29,28 @@ import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.common.types.Types;
+import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.expr.holders.Decimal18Holder;
+import org.apache.drill.exec.expr.holders.Decimal28SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal38SparseHolder;
+import org.apache.drill.exec.expr.holders.Decimal9Holder;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.util.DecimalUtility;
 import org.apache.drill.exec.vector.BigIntVector;
 import org.apache.drill.exec.vector.BitVector;
 import org.apache.drill.exec.vector.DateVector;
+import org.apache.drill.exec.vector.Decimal18Vector;
+import org.apache.drill.exec.vector.Decimal28SparseVector;
+import org.apache.drill.exec.vector.Decimal38SparseVector;
+import org.apache.drill.exec.vector.Decimal9Vector;
 import org.apache.drill.exec.vector.Float4Vector;
 import org.apache.drill.exec.vector.Float8Vector;
 import org.apache.drill.exec.vector.IntVector;
@@ -63,6 +73,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -198,7 +210,7 @@ public class HiveRecordReader extends AbstractRecordReader {
 
         selectedColumnObjInspectors.add(fieldOI);
         selectedColumnTypes.add(typeInfo);
-        selectedColumnFieldConverters.add(HiveFieldConverter.create(typeInfo));
+        selectedColumnFieldConverters.add(HiveFieldConverter.create(typeInfo, fragmentContext));
       }
 
       if (isStarQuery()) {
@@ -243,14 +255,14 @@ public class HiveRecordReader extends AbstractRecordReader {
   public void setup(OutputMutator output) throws ExecutionSetupException {
     try {
       for (int i = 0; i < selectedColumnNames.size(); i++) {
-        MajorType type = Types.optional(getMinorTypeFromHiveTypeInfo(selectedColumnTypes.get(i)));
+        MajorType type = getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), true);
         MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedColumnNames.get(i)), type);
         Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
         vectors.add(output.addField(field, vvClass));
       }
 
       for (int i = 0; i < selectedPartitionNames.size(); i++) {
-        MajorType type = Types.required(getMinorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i)));
+        MajorType type = getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), false);
         MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedPartitionNames.get(i)), type);
         Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
         pVectors.add(output.addField(field, vvClass));
@@ -340,38 +352,52 @@ public class HiveRecordReader extends AbstractRecordReader {
       case BINARY:
         return TypeProtos.MinorType.VARBINARY;
       case BOOLEAN:
-        return TypeProtos.MinorType.BIT;
+        return MinorType.BIT;
       case BYTE:
-        return TypeProtos.MinorType.TINYINT;
-      case DECIMAL:
-        return TypeProtos.MinorType.VARCHAR;
+        return MinorType.TINYINT;
+      case DECIMAL: {
+        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
+        return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision());
+      }
       case DOUBLE:
-        return TypeProtos.MinorType.FLOAT8;
+        return MinorType.FLOAT8;
       case FLOAT:
-        return TypeProtos.MinorType.FLOAT4;
+        return MinorType.FLOAT4;
       case INT:
-        return TypeProtos.MinorType.INT;
+        return MinorType.INT;
       case LONG:
-        return TypeProtos.MinorType.BIGINT;
+        return MinorType.BIGINT;
       case SHORT:
-        return TypeProtos.MinorType.SMALLINT;
+        return MinorType.SMALLINT;
       case STRING:
       case VARCHAR:
-        return TypeProtos.MinorType.VARCHAR;
+        return MinorType.VARCHAR;
       case TIMESTAMP:
-        return TypeProtos.MinorType.TIMESTAMP;
+        return MinorType.TIMESTAMP;
       case DATE:
-        return TypeProtos.MinorType.DATE;
+        return MinorType.DATE;
     }
 
     throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString());
     return null;
   }
 
-  public static MinorType getMinorTypeFromHiveTypeInfo(TypeInfo typeInfo) {
+  public static MajorType getMajorTypeFromHiveTypeInfo(TypeInfo typeInfo, boolean nullable) {
     switch (typeInfo.getCategory()) {
-      case PRIMITIVE:
-        return getMinorTypeFromHivePrimitiveTypeInfo(((PrimitiveTypeInfo) typeInfo));
+      case PRIMITIVE: {
+        PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
+        MinorType minorType = getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo);
+        MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(minorType)
+            .setMode((nullable ? DataMode.OPTIONAL : DataMode.REQUIRED));
+
+        if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
+          DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
+          typeBuilder.setPrecision(decimalTypeInfo.precision())
+              .setScale(decimalTypeInfo.scale()).build();
+        }
+
+        return typeBuilder.build();
+      }
 
       case LIST:
       case MAP:
@@ -489,11 +515,8 @@ public class HiveRecordReader extends AbstractRecordReader {
           break;
         }
         case DECIMAL: {
-          VarCharVector v = (VarCharVector) vector;
-          byte[] value = ((HiveDecimal) val).toString().getBytes();
-          for (int j = 0; j < recordCount; j++) {
-            v.getMutator().setSafe(j, value);
-          }
+          populateDecimalPartitionVector((DecimalTypeInfo)selectedPartitionTypes.get(i), vector,
+              ((HiveDecimal)val).bigDecimalValue(), recordCount);
           break;
         }
         default:
@@ -503,6 +526,57 @@ public class HiveRecordReader extends AbstractRecordReader {
     }
   }
 
+  private void populateDecimalPartitionVector(DecimalTypeInfo typeInfo, ValueVector vector, BigDecimal bigDecimal,
+      int recordCount) {
+    int precision = typeInfo.precision();
+    int scale = typeInfo.scale();
+    if (precision <= 9) {
+      Decimal9Holder holder = new Decimal9Holder();
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.value = DecimalUtility.getDecimal9FromBigDecimal(bigDecimal, precision, scale);
+      Decimal9Vector v = (Decimal9Vector) vector;
+      for (int j = 0; j < recordCount; j++) {
+        v.getMutator().setSafe(j, holder);
+      }
+    } else if (precision <= 18) {
+      Decimal18Holder holder = new Decimal18Holder();
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.value = DecimalUtility.getDecimal18FromBigDecimal(bigDecimal, precision, scale);
+      Decimal18Vector v = (Decimal18Vector) vector;
+      for (int j = 0; j < recordCount; j++) {
+        v.getMutator().setSafe(j, holder);
+      }
+    } else if (precision <= 28) {
+      Decimal28SparseHolder holder = new Decimal28SparseHolder();
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.buffer = fragmentContext.getManagedBuffer(
+          Decimal28SparseHolder.nDecimalDigits * DecimalUtility.integerSize);
+      holder.start = 0;
+      DecimalUtility.getSparseFromBigDecimal(bigDecimal, holder.buffer, 0, scale, precision,
+          Decimal28SparseHolder.nDecimalDigits);
+      Decimal28SparseVector v = (Decimal28SparseVector) vector;
+      for (int j = 0; j < recordCount; j++) {
+        v.getMutator().setSafe(j, holder);
+      }
+    } else {
+      Decimal38SparseHolder holder = new Decimal38SparseHolder();
+      holder.scale = scale;
+      holder.precision = precision;
+      holder.buffer = fragmentContext.getManagedBuffer(
+          Decimal38SparseHolder.nDecimalDigits * DecimalUtility.integerSize);
+      holder.start = 0;
+      DecimalUtility.getSparseFromBigDecimal(bigDecimal, holder.buffer, 0, scale, precision,
+          Decimal38SparseHolder.nDecimalDigits);
+      Decimal38SparseVector v = (Decimal38SparseVector) vector;
+      for (int j = 0; j < recordCount; j++) {
+        v.getMutator().setSafe(j, holder);
+      }
+    }
+  }
+
   /** Partition value is received in string format. Convert it into appropriate object based on the type. */
   private Object convertPartitionType(TypeInfo typeInfo, String value) {
     if (typeInfo.getCategory() != Category.PRIMITIVE) {
@@ -519,8 +593,11 @@ public class HiveRecordReader extends AbstractRecordReader {
         return Boolean.parseBoolean(value);
       case BYTE:
         return Byte.parseByte(value);
-      case DECIMAL:
-        return HiveDecimal.create(value);
+      case DECIMAL: {
+        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
+        return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value),
+            decimalTypeInfo.precision(), decimalTypeInfo.scale());
+      }
       case DOUBLE:
         return Double.parseDouble(value);
       case FLOAT:

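Note: getMinorTypeFromHiveTypeInfo is replaced by getMajorTypeFromHiveTypeInfo so
that a decimal column's precision and scale survive into Drill's type metadata
instead of being dropped. A sketch of the MajorType the new method is expected to
build for a nullable DECIMAL(15, 5) table column, using the same protobuf builder
calls as the patch (the method name nullableDecimal15_5 is hypothetical):

    import org.apache.drill.common.types.TypeProtos.DataMode;
    import org.apache.drill.common.types.TypeProtos.MajorType;
    import org.apache.drill.common.types.TypeProtos.MinorType;

    static MajorType nullableDecimal15_5() {
      return MajorType.newBuilder()
          .setMinorType(MinorType.DECIMAL18) // precision 15 <= 18
          .setMode(DataMode.OPTIONAL)        // table columns are nullable;
                                             // partition columns use REQUIRED
          .setPrecision(15)
          .setScale(5)
          .build();
    }
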
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/bf31a9a2/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHiveProjectPushDown.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHiveProjectPushDown.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHiveProjectPushDown.java
index 07ca82f..4479c8b 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHiveProjectPushDown.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/TestHiveProjectPushDown.java
@@ -65,8 +65,8 @@ public class TestHiveProjectPushDown extends PlanTestBase {
 
   @Test
   public void testMultiplePartitionColumnsProject() throws Exception {
-    String query = "SELECT double_part as dbl_p, decimal_part as dec_p FROM hive.`default`.readtest";
-    String expectedColNames = " \"columns\" : [ \"`double_part`\", \"`decimal_part`\" ]";
+    String query = "SELECT double_part as dbl_p, decimal0_part as dec_p FROM hive.`default`.readtest";
+    String expectedColNames = " \"columns\" : [ \"`double_part`\", \"`decimal0_part`\" ]";
 
     testHelper(query, expectedColNames, 2);
   }
@@ -74,9 +74,9 @@ public class TestHiveProjectPushDown extends PlanTestBase {
   @Test
   public void testPartitionAndRegularColumnProjectColumn() throws Exception {
     String query = "SELECT boolean_field as b_f, tinyint_field as ti_f, " +
-        "double_part as dbl_p, decimal_part as dec_p FROM hive.`default`.readtest";
+        "double_part as dbl_p, decimal0_part as dec_p FROM hive.`default`.readtest";
     String expectedColNames = " \"columns\" : [ \"`boolean_field`\", \"`tinyint_field`\", " +
-        "\"`double_part`\", \"`decimal_part`\" ]";
+        "\"`double_part`\", \"`decimal0_part`\" ]";
 
     testHelper(query, expectedColNames, 2);
   }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/bf31a9a2/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
index 2eb4234..c55b714 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
@@ -120,7 +120,11 @@ public class HiveTestDataGenerator {
         "  binary_field BINARY," +
         "  boolean_field BOOLEAN," +
         "  tinyint_field TINYINT," +
-        "  decimal_field DECIMAL(38, 3)," +
+        "  decimal0_field DECIMAL," +
+        "  decimal9_field DECIMAL(6, 2)," +
+        "  decimal18_field DECIMAL(15, 5)," +
+        "  decimal28_field DECIMAL(23, 1)," +
+        "  decimal38_field DECIMAL(30, 3)," +
         "  double_field DOUBLE," +
         "  float_field FLOAT," +
         "  int_field INT," +
@@ -134,7 +138,11 @@ public class HiveTestDataGenerator {
         "  binary_part BINARY," +
         "  boolean_part BOOLEAN," +
         "  tinyint_part TINYINT," +
-        "  decimal_part DECIMAL(38, 3)," +
+        "  decimal0_part DECIMAL," +
+        "  decimal9_part DECIMAL(6, 2)," +
+        "  decimal18_part DECIMAL(15, 5)," +
+        "  decimal28_part DECIMAL(23, 1)," +
+        "  decimal38_part DECIMAL(30, 3)," +
         "  double_part DOUBLE," +
         "  float_part FLOAT," +
         "  int_part INT," +
@@ -153,7 +161,11 @@ public class HiveTestDataGenerator {
         "  binary_part='binary', " +
         "  boolean_part='true', " +
         "  tinyint_part='64', " +
-        "  decimal_part='3489423929323435243', " +
+        "  decimal0_part='36.9', " +
+        "  decimal9_part='36.9', " +
+        "  decimal18_part='3289379872.945645', " +
+        "  decimal28_part='39579334534534.35345', " +
+        "  decimal38_part='363945093845093890.9', " +
         "  double_part='8.345', " +
         "  float_part='4.67', " +
         "  int_part='123456', " +
@@ -170,7 +182,11 @@ public class HiveTestDataGenerator {
         "  binary_part='binary', " +
         "  boolean_part='true', " +
         "  tinyint_part='64', " +
-        "  decimal_part='3489423929323435243', " +
+        "  decimal0_part='36.9', " +
+        "  decimal9_part='36.9', " +
+        "  decimal18_part='3289379872.945645', " +
+        "  decimal28_part='39579334534534.35345', " +
+        "  decimal38_part='363945093845093890.9', " +
         "  double_part='8.345', " +
         "  float_part='4.67', " +
         "  int_part='123456', " +
@@ -275,8 +291,10 @@ public class HiveTestDataGenerator {
     File file = getTempFile();
 
     PrintWriter printWriter = new PrintWriter(file);
-    printWriter.println("YmluYXJ5ZmllbGQ=,false,34,3489423929323435243,8.345,4.67,123456,234235,3455,stringfield,varcharfield,2013-07-05 17:01:00,2013-07-05");
-    printWriter.println(",,,,,,,,,,,,");
+    printWriter.println("YmluYXJ5ZmllbGQ=,false,34,65.99,2347.923,2758725827.9999,29375892739852.7689," +
+        "89853749534593985.7834783,8.345,4.67,123456,234235,3455,stringfield,varcharfield," +
+        "2013-07-05 17:01:00,2013-07-05");
+    printWriter.println(",,,,,,,,,,,,,,,,");
     printWriter.close();
 
     return file.getPath();
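
A note on the test data: the single decimal_field/decimal_part pair becomes five
columns covering each precision bucket, plus Hive's bare DECIMAL (which defaults
to DECIMAL(10,0)). Reading the new sample row against the readtest schema above,
the decimal values line up as:

    decimal0_field  -> 65.99
    decimal9_field  -> 2347.923
    decimal18_field -> 2758725827.9999
    decimal28_field -> 29375892739852.7689
    decimal38_field -> 89853749534593985.7834783

and the all-null row grows from 13 to 17 comma-separated fields to match the
wider schema.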