Posted to commits@hive.apache.org by mm...@apache.org on 2016/12/22 08:32:39 UTC

[10/10] hive git commit: HIVE-15335: Fast Decimal (Matt McCline, reviewed by Sergey Shelukhin, Prasanth Jayachandran, Owen O'Malley)

HIVE-15335: Fast Decimal (Matt McCline, reviewed by Sergey Shelukhin, Prasanth Jayachandran, Owen O'Malley)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4ba713cc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4ba713cc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4ba713cc

Branch: refs/heads/master
Commit: 4ba713ccd85c3706d195aeef9476e6e6363f1c21
Parents: 597ca1b
Author: Matt McCline <mm...@hortonworks.com>
Authored: Thu Dec 22 00:32:08 2016 -0800
Committer: Matt McCline <mm...@hortonworks.com>
Committed: Thu Dec 22 00:32:08 2016 -0800

----------------------------------------------------------------------
 .../hive/common/type/HiveDecimalTestBase.java   |  561 ++
 .../hive/common/type/TestHiveDecimal.java       |  216 -
 .../TestHiveDecimalOrcSerializationUtils.java   |  378 +
 .../apache/orc/impl/ColumnStatisticsImpl.java   |   50 +-
 .../orc/impl/ConvertTreeReaderFactory.java      |   81 +-
 .../org/apache/orc/impl/TreeReaderFactory.java  |   30 +-
 .../java/org/apache/orc/impl/WriterImpl.java    |   33 +-
 .../org/apache/orc/TestColumnStatistics.java    |   13 +-
 .../UDAFTemplates/VectorUDAFMinMaxDecimal.txt   |   45 +-
 .../ql/exec/vector/VectorDeserializeRow.java    |    3 +-
 .../hive/ql/exec/vector/VectorExtractRow.java   |    3 +-
 .../ql/exec/vector/VectorHashKeyWrapper.java    |    3 +-
 .../exec/vector/VectorHashKeyWrapperBatch.java  |    2 +-
 .../hive/ql/exec/vector/VectorSerializeRow.java |    7 +-
 .../ql/exec/vector/VectorizationContext.java    |  174 +-
 .../expressions/CastDecimalToBoolean.java       |    7 +-
 .../vector/expressions/CastDecimalToChar.java   |    4 +-
 .../vector/expressions/CastDecimalToDouble.java |    2 +-
 .../vector/expressions/CastDecimalToLong.java   |   44 +-
 .../vector/expressions/CastDecimalToString.java |   23 +-
 .../expressions/CastDecimalToTimestamp.java     |   11 +-
 .../expressions/CastDecimalToVarChar.java       |    4 +-
 .../vector/expressions/CastDoubleToDecimal.java |   10 +-
 .../vector/expressions/DecimalColumnInList.java |   21 +-
 .../ql/exec/vector/expressions/DecimalUtil.java |  281 +-
 .../expressions/FilterDecimalColumnInList.java  |   18 +-
 .../expressions/FilterStructColumnInList.java   |    2 +-
 .../vector/expressions/FuncDecimalToLong.java   |   19 +-
 .../vector/expressions/StructColumnInList.java  |    2 +-
 .../aggregates/VectorUDAFAvgDecimal.java        |   95 +-
 .../aggregates/VectorUDAFSumDecimal.java        |  148 +-
 .../mapjoin/VectorMapJoinCommonOperator.java    |   11 +-
 .../ql/io/parquet/write/DataWritableWriter.java |    3 +-
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java  |    6 +-
 .../org/apache/hadoop/hive/ql/udf/UDFLog.java   |    4 +-
 .../org/apache/hadoop/hive/ql/udf/UDFMath.java  |    2 +-
 .../org/apache/hadoop/hive/ql/udf/UDFSign.java  |    6 +-
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToByte.java    |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToDouble.java  |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToFloat.java   |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToInteger.java |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToLong.java    |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToShort.java   |    4 +-
 .../hive/ql/udf/generic/GenericUDAFSum.java     |   44 +-
 .../hive/ql/udf/generic/GenericUDFAbs.java      |    3 +-
 .../hive/ql/udf/generic/GenericUDFBRound.java   |    7 +-
 .../hive/ql/udf/generic/GenericUDFCeil.java     |    4 +-
 .../hive/ql/udf/generic/GenericUDFFloor.java    |    4 +-
 .../ql/udf/generic/GenericUDFOPNegative.java    |    4 +-
 .../udf/generic/GenericUDFOPNumericMinus.java   |    7 +-
 .../ql/udf/generic/GenericUDFOPNumericPlus.java |    9 +-
 .../hive/ql/udf/generic/GenericUDFRound.java    |   16 +-
 .../exec/vector/TestVectorGroupByOperator.java  |   12 +-
 .../hive/ql/exec/vector/TestVectorSerDeRow.java |  226 +-
 .../ql/exec/vector/VectorRandomRowSource.java   |   26 +-
 .../vector/expressions/TestDecimalUtil.java     |    1 -
 .../vector/expressions/TestVectorTypeCasts.java |   28 +-
 .../mapjoin/fast/CheckFastRowHashMap.java       |   27 +-
 .../exec/vector/mapjoin/fast/VerifyFastRow.java |    5 +-
 .../vector/util/VectorizedRowGroupGenUtil.java  |    6 +-
 .../test/results/clientpositive/decimal_2.q.out |    4 +-
 ...ema_evol_orc_nonvec_part_all_primitive.q.out |    4 +-
 ...schema_evol_orc_vec_part_all_primitive.q.out |    4 +-
 ...ma_evol_text_nonvec_part_all_primitive.q.out |    8 +-
 ...chema_evol_text_vec_part_all_primitive.q.out |    8 +-
 ...ma_evol_text_vecrow_part_all_primitive.q.out |    8 +-
 .../clientpositive/llap/vector_decimal_2.q.out  |    4 +-
 .../llap/vector_decimal_expressions.q.out       |   20 +-
 .../vector_decimal_expressions.q.out            |   20 +-
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java |    5 +-
 .../binarysortable/BinarySortableSerDe.java     |  138 +-
 .../fast/BinarySortableDeserializeRead.java     |   35 +-
 .../fast/BinarySortableSerializeWrite.java      |   26 +-
 .../hadoop/hive/serde2/fast/SerializeWrite.java |    5 +
 .../hive/serde2/lazy/LazyHiveDecimal.java       |   72 +-
 .../lazy/fast/LazySimpleDeserializeRead.java    |   23 +-
 .../lazy/fast/LazySimpleSerializeWrite.java     |   26 +-
 .../LazyHiveDecimalObjectInspector.java         |    9 +-
 .../lazybinary/LazyBinaryHiveDecimal.java       |    6 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |   74 +-
 .../fast/LazyBinaryDeserializeRead.java         |   28 +-
 .../fast/LazyBinarySerializeWrite.java          |   56 +-
 .../objectinspector/ObjectInspectorUtils.java   |    2 +
 .../PrimitiveObjectInspectorUtils.java          |   58 +-
 ...tableConstantHiveDecimalObjectInspector.java |   17 +-
 .../hive/serde2/typeinfo/HiveDecimalUtils.java  |    5 +-
 .../hive/serde2/SerdeRandomRowSource.java       |   28 +-
 .../apache/hadoop/hive/serde2/VerifyFast.java   |    2 +-
 .../hive/serde2/avro/TestAvroSerializer.java    |    8 +-
 .../binarysortable/MyTestPrimitiveClass.java    |   16 +-
 .../binarysortable/TestBinarySortableFast.java  |   43 +-
 .../hive/serde2/io/TestHiveDecimalWritable.java |  250 -
 .../hive/common/type/FastHiveDecimal.java       |  741 ++
 .../hive/common/type/FastHiveDecimalImpl.java   | 9149 ++++++++++++++++++
 .../hadoop/hive/common/type/HiveDecimal.java    | 1487 ++-
 .../hadoop/hive/common/type/HiveDecimalV1.java  |  386 +
 .../hive/common/type/HiveDecimalVersionV1.java  |   33 +
 .../hive/common/type/HiveDecimalVersionV2.java  |   33 +
 .../hadoop/hive/common/type/RandomTypeUtil.java |   27 +-
 .../ql/exec/vector/DecimalColumnVector.java     |   35 +-
 .../hadoop/hive/ql/util/TimestampUtils.java     |   83 +-
 .../hive/serde2/io/HiveDecimalWritable.java     |  956 +-
 .../hive/serde2/io/HiveDecimalWritableV1.java   |  191 +
 .../serde2/io/HiveDecimalWritableVersionV1.java |   33 +
 .../serde2/io/HiveDecimalWritableVersionV2.java |   33 +
 .../org/apache/hadoop/hive/VersionTestBase.java |  322 +
 .../hive/common/type/HiveDecimalTestBase.java   |  558 ++
 .../hive/common/type/TestHiveDecimal.java       | 3404 +++++++
 .../common/type/TestHiveDecimalVersion.java     |   67 +
 .../hive/serde2/io/TestHiveDecimalWritable.java |   52 +
 .../io/TestHiveDecimalWritableVersion.java      |   71 +
 112 files changed, 19789 insertions(+), 1663 deletions(-)
----------------------------------------------------------------------
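
For orientation, the heart of the patch is the new FastHiveDecimal/FastHiveDecimalImpl representation behind HiveDecimal, with the old implementation preserved as HiveDecimalV1. A minimal sketch of the calling conventions the tests below rely on (create() and enforcePrecisionScale() return null instead of throwing; HiveDecimalWritable is now mutable), assuming only APIs that appear in this diff:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class FastDecimalSketch {
      public static void main(String[] args) {
        HiveDecimal dec = HiveDecimal.create("234.79");     // parse; null if invalid
        HiveDecimal bad = HiveDecimal.create("3abc43");     // null, not an exception
        // Null when the value cannot fit in (precision, scale).
        HiveDecimal fits = HiveDecimal.enforcePrecisionScale(dec, 5, 2);
        // Mutable writable: aggregate in place instead of allocating per add.
        HiveDecimalWritable sum = new HiveDecimalWritable(0);
        sum.mutateAdd(new HiveDecimalWritable("234.79"));
        System.out.println(dec + " " + bad + " " + fits + " " + sum);
      }
    }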


http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java b/common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java
new file mode 100644
index 0000000..dee49f9
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java
@@ -0,0 +1,561 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.util.Random;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+
+// A COPY of the one in storage-api, since sharing test classes across modules isn't
+// currently supported in our build.
+
+public class HiveDecimalTestBase {
+
+  public static int POUND_FACTOR = 1000;
+
+  public static enum BigDecimalFlavor {
+    NORMAL_RANGE,
+    FRACTIONS_ONLY,
+    NEGATIVE_SCALE,
+    LONG_TAIL
+  }
+
+  public static enum BigDecimalPairFlavor {
+    RANDOM,
+    NEAR,
+    INVERSE
+  }
+
+  public BigDecimal randHiveBigDecimal(Random r, String digitAlphabet, BigDecimalFlavor bigDecimalFlavor) {
+    switch (bigDecimalFlavor) {
+    case NORMAL_RANGE:
+      return randHiveBigDecimalNormalRange(r, digitAlphabet);
+    case FRACTIONS_ONLY:
+      return randHiveBigDecimalFractionsOnly(r, digitAlphabet);
+    case NEGATIVE_SCALE:
+      return randHiveBigDecimalNegativeScale(r, digitAlphabet);
+    case LONG_TAIL:
+      return randHiveBigDecimalLongTail(r, digitAlphabet);
+    default:
+      throw new RuntimeException("Unexpected big decimal flavor " + bigDecimalFlavor);
+    }
+  }
+
+  public BigDecimal[] randHiveBigDecimalPair(Random r, String digitAlphabet,
+      BigDecimalFlavor bigDecimalFlavor, BigDecimalPairFlavor bigDecimalPairFlavor) {
+    BigDecimal[] pair = new BigDecimal[2];
+    BigDecimal bigDecimal1 = randHiveBigDecimal(r, digitAlphabet, bigDecimalFlavor);
+    pair[0] = bigDecimal1;
+
+    BigDecimal bigDecimal2;
+    switch (bigDecimalPairFlavor) {
+    case RANDOM:
+      bigDecimal2 = randHiveBigDecimal(r, digitAlphabet, bigDecimalFlavor);
+      break;
+    case NEAR:
+      bigDecimal2 = randHiveBigDecimalNear(r, bigDecimal1);
+      break;
+    case INVERSE:
+      bigDecimal2 = randHiveBigDecimalInverse(r, bigDecimal1);
+      break;
+    default:
+      throw new RuntimeException("Unexpected big decimal pair flavor " + bigDecimalPairFlavor);
+    }
+    pair[1] = bigDecimal2;
+    return pair;
+  }
+
+  public BigDecimal randHiveBigDecimalNormalRange(Random r, String digitAlphabet) {
+    String digits = RandomTypeUtil.getRandString(r, digitAlphabet, 1 + r.nextInt(38));
+    BigInteger bigInteger = new BigInteger(digits);
+    boolean negated = false;
+    if (r.nextBoolean()) {
+      bigInteger = bigInteger.negate();
+      negated = true;
+    }
+    int scale = 0 + r.nextInt(38 + 1);
+    return new BigDecimal(bigInteger, scale);
+  }
+
+  public BigDecimal randHiveBigDecimalNegativeScale(Random r, String digitAlphabet) {
+    String digits = RandomTypeUtil.getRandString(r, digitAlphabet, 1 + r.nextInt(38));
+    BigInteger bigInteger = new BigInteger(digits);
+    boolean negated = false;
+    if (r.nextBoolean()) {
+      bigInteger = bigInteger.negate();
+      negated = true;
+    }
+    int scale = 0 + (r.nextBoolean() ? 0 : r.nextInt(38 + 1));
+    if (r.nextBoolean()) {
+      scale = -scale;
+    }
+    return new BigDecimal(bigInteger, scale);
+  }
+
+  public BigDecimal randHiveBigDecimalLongTail(Random r, String digitAlphabet) {
+    int scale = 0 + r.nextInt(38 + 20);
+    final int maxDigits = 38 + (scale == 0 ? 0 : 20);
+    String digits = RandomTypeUtil.getRandString(r, digitAlphabet, 1 + r.nextInt(maxDigits));
+    BigInteger bigInteger = new BigInteger(digits);
+    boolean negated = false;
+    if (r.nextBoolean()) {
+      bigInteger = bigInteger.negate();
+      negated = true;
+    }
+    return new BigDecimal(bigInteger, scale);
+  }
+
+  public BigDecimal randHiveBigDecimalFractionsOnly(Random r, String digitAlphabet) {
+    int scale = 1 + r.nextInt(38 + 1);
+    String digits = RandomTypeUtil.getRandString(r, digitAlphabet, 1 + r.nextInt(scale));
+    BigInteger bigInteger = new BigInteger(digits);
+    boolean negated = false;
+    if (r.nextBoolean()) {
+      bigInteger = bigInteger.negate();
+      negated = true;
+    }
+    return new BigDecimal(bigInteger, scale);
+  }
+
+  public BigDecimal randHiveBigDecimalNear(Random r, BigDecimal bigDecimal) {
+
+    int scale = bigDecimal.scale();
+    int delta = r.nextInt(10);
+    if (r.nextBoolean()) {
+      return bigDecimal.add(new BigDecimal(BigInteger.valueOf(delta), scale));
+    } else {
+      return bigDecimal.subtract(new BigDecimal(BigInteger.valueOf(delta), scale));
+    }
+  }
+
+  public BigDecimal randHiveBigDecimalInverse(Random r, BigDecimal bigDecimal) {
+    if (bigDecimal.signum() == 0) {
+      return bigDecimal;
+    }
+    // Bound the precision: non-terminating quotients (e.g. 1/3) would otherwise throw.
+    return BigDecimal.ONE.divide(bigDecimal, java.math.MathContext.DECIMAL128);
+  }
+
+  public BigInteger randHiveBigInteger(Random r, String digitAlphabet) {
+    String digits = RandomTypeUtil.getRandString(r, digitAlphabet, 1 + r.nextInt(38));
+    BigInteger bigInteger = new BigInteger(digits);
+    boolean negated = false;
+    if (r.nextBoolean()) {
+      bigInteger = bigInteger.negate();
+      negated = true;
+    }
+    return bigInteger;
+  }
+
+  public boolean isTenPowerBug(String string) {
+    // // System.out.println("TEST_IS_TEN_TO_38_STRING isTenPowerBug " + string);
+    if (string.charAt(0) == '-') {
+      string = string.substring(1);
+    }
+    int index = string.indexOf('.');
+    if (index != -1) {
+      if (index == 0) {
+        string = string.substring(1);
+      } else {
+        string = string.substring(0, index) + string.substring(index + 1);
+      }
+    }
+    // // System.out.println("TEST_IS_TEN_TO_38_STRING isTenPowerBug " + string);
+    return string.equals("100000000000000000000000000000000000000");
+  }
+
+  //------------------------------------------------------------------------------------------------
+
+  public static String[] specialDecimalStrings = new String[] {
+    "0",
+    "1",
+    "-1",
+    "10",
+    "-10",
+    "100",
+    "-100",
+    "127",                                          // Byte.MAX_VALUE
+    "127.1",
+    "127.0008",
+    "127.49",
+    "127.5",
+    "127.9999999999999999999",
+    "-127",
+    "-127.1",
+    "-127.0008",
+    "-127.49",
+    "-127.5",
+    "-127.999999",
+    "128",
+    "128.1",
+    "128.0008",
+    "128.49",
+    "128.5",
+    "128.9999999999999999999",
+    "-128",                                         // Byte.MIN_VALUE
+    "-128.1",
+    "-128.0008",
+    "-128.49",
+    "-128.5",
+    "-128.999",
+    "129",
+    "129.1",
+    "-129",
+    "-129.1",
+    "1000",
+    "-1000",
+    "10000",
+    "-10000",
+    "32767",                                        // Short.MAX_VALUE
+    "32767.1",
+    "32767.0008",
+    "32767.49",
+    "32767.5",
+    "32767.99999999999",
+    "-32767",
+    "-32767.1",
+    "-32767.0008",
+    "-32767.49",
+    "-32767.5",
+    "-32767.9",
+    "32768",
+    "32768.1",
+    "32768.0008",
+    "32768.49",
+    "32768.5",
+    "32768.9999999999",
+    "-32768",                                       // Short.MIN_VALUE
+    "-32768.1",
+    "-32768.0008",
+    "-32768.49",
+    "-32768.5",
+    "-32768.9999999",
+    "32769",
+    "32769.1",
+    "-32769",
+    "-32769.1",
+    "100000",
+    "-100000",
+    "1000000",
+    "-1000000",
+    "10000000",
+    "-10000000",
+    "100000000",
+    "99999999",                                     // 10^8 - 1
+    "-99999999",
+    "-100000000",
+    "1000000000",
+    "-1000000000",
+    "2147483647",                                  // Integer.MAX_VALUE
+    "2147483647.1",
+    "2147483647.0008",
+    "2147483647.49",
+    "2147483647.5",
+    "2147483647.9999999999",
+    "-2147483647",
+    "-2147483647.1",
+    "-2147483647.0008",
+    "-2147483647.49",
+    "-2147483647.5",
+    "-2147483647.9999999999999999999",
+    "2147483648",
+    "2147483648.1",
+    "2147483648.0008",
+    "2147483648.49",
+    "2147483648.5",
+    "2147483648.9",
+    "-2147483648",                                 // Integer.MIN_VALUE
+    "-2147483648.1",
+    "-2147483648.0008",
+    "-2147483648.49",
+    "-2147483648.5",
+    "-2147483648.999",
+    "2147483649",
+    "2147483649.1",
+    "-2147483649",
+    "-2147483649.1",
+    "10000000000",
+    "-10000000000",
+    "100000000000",
+    "-100000000000",
+    "1000000000000",
+    "-1000000000000",
+    "10000000000000",
+    "-10000000000000",
+    "100000000000000",
+    "-100000000000000",
+    "999999999999999",
+    "-999999999999999",
+    "1000000000000000",                            // 10^15
+    "-1000000000000000",
+    "9999999999999999",                            // 10^16 - 1
+    "-9999999999999999",
+    "10000000000000000",                           // 10^16
+    "-10000000000000000",
+    "100000000000000000",
+    "-100000000000000000",
+    "1000000000000000000",
+    "-1000000000000000000",
+    "9223372036854775807",                         // Long.MAX_VALUE
+    "9223372036854775807.1",
+    "9223372036854775807.0008",
+    "9223372036854775807.49",
+    "9223372036854775807.5",
+    "9223372036854775807.9",
+    "-9223372036854775807",
+    "-9223372036854775807.1",
+    "-9223372036854775807.0008",
+    "-9223372036854775807.49",
+    "-9223372036854775807.5",
+    "-9223372036854775807.9999999999999999999",
+    "-9223372036854775808",
+    "-9223372036854775808.1",
+    "9223372036854775808",
+    "9223372036854775808.1",
+    "9223372036854775808.0008",
+    "9223372036854775808.49",
+    "9223372036854775808.5",
+    "9223372036854775808.9",
+    "9223372036854775809",
+    "9223372036854775809.1",
+    "-9223372036854775808",                        // Long.MIN_VALUE
+    "-9223372036854775808.1",
+    "-9223372036854775808.0008",
+    "-9223372036854775808.49",
+    "-9223372036854775808.5",
+    "-9223372036854775808.9999999",
+    "9223372036854775809",
+    "9223372036854775809.1",
+    "-9223372036854775809",
+    "-9223372036854775809.1",
+    "10000000000000000000000000000000",            // 10^31
+    "-10000000000000000000000000000000",
+    "99999999999999999999999999999999",            // 10^32 - 1
+    "-99999999999999999999999999999999", 
+    "100000000000000000000000000000000",           // 10^32
+    "-100000000000000000000000000000000",
+    "10000000000000000000000000000000000000",      // 10^37
+    "-10000000000000000000000000000000000000",
+    "99999999999999999999999999999999999999",      // 10^38 - 1
+    "-99999999999999999999999999999999999999",
+    "100000000000000000000000000000000000000",     // 10^38
+    "-100000000000000000000000000000000000000", 
+    "1000000000000000000000000000000000000000",    // 10^39
+    "-1000000000000000000000000000000000000000",
+
+    "18446744073709551616",                        // Unsigned 64 max.
+    "-18446744073709551616",
+    "340282366920938463463374607431768211455",     // 2^128 - 1
+    "-340282366920938463463374607431768211455",
+
+    "0.999999999999999",
+    "-0.999999999999999",
+    "0.0000000000000001",                          // 10^-15
+    "-0.0000000000000001",
+    "0.9999999999999999",
+    "-0.9999999999999999",
+    "0.00000000000000001",                         // 10^-16
+    "-0.00000000000000001",
+    "0.00000000000000000000000000000001",          // 10^-31
+    "-0.00000000000000000000000000000001",
+    "0.99999999999999999999999999999999",          // 10^-32 + 1
+    "-0.99999999999999999999999999999999",
+    "0.000000000000000000000000000000001",         // 10^-32
+    "-0.000000000000000000000000000000001",
+    "0.00000000000000000000000000000000000001",    // 10^-37
+    "-0.00000000000000000000000000000000000001",
+    "0.99999999999999999999999999999999999999",    // 10^-38 + 1
+    "-0.99999999999999999999999999999999999999",
+    "0.000000000000000000000000000000000000001",   // 10^-38
+    "-0.000000000000000000000000000000000000001",
+    "0.0000000000000000000000000000000000000001",  // 10^-39
+    "-0.0000000000000000000000000000000000000001",
+    "0.0000000000000000000000000000000000000005",  // 10^-39  (rounds)
+    "-0.0000000000000000000000000000000000000005",
+    "0.340282366920938463463374607431768211455",   // (2^128 - 1) * 10^-39
+    "-0.340282366920938463463374607431768211455",
+    "0.000000000000000000000000000000000000001",   // 10^-38
+    "-0.000000000000000000000000000000000000001",
+    "0.000000000000000000000000000000000000005",   // 10^-38
+    "-0.000000000000000000000000000000000000005",
+
+    "234.79",
+    "342348.343",
+    "12.25",
+    "-12.25",
+    "72057594037927935",                           // 2^56 - 1
+    "-72057594037927935",
+    "72057594037927936",                           // 2^56
+    "-72057594037927936",
+    "5192296858534827628530496329220095",          // 2^56 * 2^56 - 1
+    "-5192296858534827628530496329220095",
+    "5192296858534827628530496329220096",          // 2^56 * 2^56
+    "-5192296858534827628530496329220096",
+
+    "54216721532321902598.70",
+    "-906.62545207002374150309544832320",
+    "-0.0709351061072",
+    "1460849063411925.53",
+    "8.809130E-33",
+    "-4.0786300706013636202E-20",
+    "-3.8823936518E-1",
+    "-3.8823936518E-28",
+    "-3.8823936518E-29",
+    "598575157855521918987423259.94094",
+    "299999448432.001342152474197",
+    "1786135888657847525803324040144343378.09799306448796128931113691624",  // More than 38 digits.
+    "-1786135888657847525803324040144343378.09799306448796128931113691624",
+    "57847525803324040144343378.09799306448796128931113691624",
+    "0.999999999999999999990000",
+    "005.34000",
+    "1E-90",
+
+    "0.4",
+    "-0.4",
+    "0.5",
+    "-0.5",
+    "0.6",
+    "-0.6",
+    "1.4",
+    "-1.4",
+    "1.5",
+    "-1.5",
+    "1.6",
+    "-1.6",
+    "2.4",
+    "-2.4",
+    "2.49",
+    "-2.49",
+    "2.5",
+    "-2.5",
+    "2.51",
+    "-2.51",
+    "-2.5",
+    "2.6",
+    "-2.6",
+    "3.00001415926",
+    "0.00",
+    "-12.25",
+    "234.79"
+  };
+
+  public static BigDecimal[] specialBigDecimals = stringArrayToBigDecimals(specialDecimalStrings);
+
+  // decimal_1_1.txt
+  public static String[] decimal_1_1_txt = {
+    "0.0",
+    "0.0000",
+    ".0",
+    "0.1",
+    "0.15",
+    "0.9",
+    "0.94",
+    "0.99",
+    "0.345",
+    "1.0",
+    "1",
+    "0",
+    "00",
+    "22",
+    "1E-9",
+    "-0.0",
+    "-0.0000",
+    "-.0",
+    "-0.1",
+    "-0.15",
+    "-0.9",
+    "-0.94",
+    "-0.99",
+    "-0.345",
+    "-1.0",
+    "-1",
+    "-0",
+    "-00",
+    "-22",
+    "-1E-9"
+  };
+
+  // kv7.txt KEYS
+  public static String[] kv7_txt_keys = {
+    "-4400",
+    "1E+99",
+    "1E-99",
+    "0",
+    "100",
+    "10",
+    "1",
+    "0.1",
+    "0.01",
+    "200",
+    "20",
+    "2",
+    "0",
+    "0.2",
+    "0.02",
+    "0.3",
+    "0.33",
+    "0.333",
+    "-0.3",
+    "-0.33",
+    "-0.333",
+    "1.0",
+    "2",
+    "3.14",
+    "-1.12",
+    "-1.12",
+    "-1.122",
+    "1.12",
+    "1.122",
+    "124.00",
+    "125.2",
+    "-1255.49",
+    "3.14",
+    "3.14",
+    "3.140",
+    "0.9999999999999999999999999",
+    "-1234567890.1234567890",
+    "1234567890.1234567800"
+  };
+
+  public static String standardAlphabet = "0123456789";
+
+  public static String[] sparseAlphabets = new String[] {
+
+    "0000000000000000000000000000000000000003",
+    "0000000000000000000000000000000000000009",
+    "0000000000000000000000000000000000000001",
+    "0000000000000000000003",
+    "0000000000000000000009",
+    "0000000000000000000001",
+    "0000000000091",
+    "000000000005",
+    "9",
+    "5555555555999999999000000000000001111111",
+    "24680",
+    "1"
+  };
+
+  public static BigDecimal[] stringArrayToBigDecimals(String[] strings) {
+    BigDecimal[] result = new BigDecimal[strings.length];
+    for (int i = 0; i < strings.length; i++) {
+      result[i] = new BigDecimal(strings[i]);
+    }
+    return result;
+  }
+}
\ No newline at end of file
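
For reference, a minimal sketch of how the generator methods above combine in the randomized tests (all names are from HiveDecimalTestBase itself; the seed is illustrative):

    import java.math.BigDecimal;
    import java.util.Random;

    public class GeneratorSketch extends HiveDecimalTestBase {
      public static void main(String[] args) {
        GeneratorSketch gen = new GeneratorSketch();
        Random r = new Random(2389);                 // seed is illustrative
        // One value per distribution flavor, from the standard digit alphabet.
        for (BigDecimalFlavor flavor : BigDecimalFlavor.values()) {
          BigDecimal bd = gen.randHiveBigDecimal(r, standardAlphabet, flavor);
          System.out.println(flavor + " -> " + bd);
        }
        // A correlated pair: NEAR perturbs the first value by a small delta
        // (up to 9 units in the last place) at the same scale.
        BigDecimal[] pair = gen.randHiveBigDecimalPair(
            r, standardAlphabet, BigDecimalFlavor.NORMAL_RANGE, BigDecimalPairFlavor.NEAR);
        System.out.println(pair[0] + " ~ " + pair[1]);
      }
    }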

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
deleted file mode 100644
index f68842c..0000000
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.common.type;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import static org.junit.Assert.*;
-
-public class TestHiveDecimal {
-
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testPrecisionScaleEnforcement() {
-    String decStr = "1786135888657847525803324040144343378.09799306448796128931113691624";
-    HiveDecimal dec = HiveDecimal.create(decStr);
-    Assert.assertEquals("1786135888657847525803324040144343378.1", dec.toString());
-    Assert.assertTrue("Decimal precision should not go above maximum",
-        dec.precision() <= HiveDecimal.MAX_PRECISION);
-    Assert.assertTrue("Decimal scale should not go above maximum", dec.scale() <= HiveDecimal.MAX_SCALE);
-
-    decStr = "57847525803324040144343378.09799306448796128931113691624";
-    HiveDecimal bd = HiveDecimal.create(decStr);
-    HiveDecimal bd1 = HiveDecimal.enforcePrecisionScale(bd, 20, 5);
-    Assert.assertNull(bd1);
-    bd1 = HiveDecimal.enforcePrecisionScale(bd, 35, 5);
-    Assert.assertEquals("57847525803324040144343378.09799", bd1.toString());
-    bd1 = HiveDecimal.enforcePrecisionScale(bd, 45, 20);
-    Assert.assertNull(bd1);
-
-    dec = HiveDecimal.create(new BigDecimal(decStr), false);
-    Assert.assertNull(dec);
-
-    dec = HiveDecimal.create("-1786135888657847525803324040144343378.09799306448796128931113691624");
-    Assert.assertEquals("-1786135888657847525803324040144343378.1", dec.toString());
-
-    dec = HiveDecimal.create("005.34000");
-    Assert.assertEquals(dec.precision(), 3);
-    Assert.assertEquals(dec.scale(), 2);
-
-    dec = HiveDecimal.create("178613588865784752580332404014434337809799306448796128931113691624");
-    Assert.assertNull(dec);
-
-    // Rounding numbers that increase int digits
-    Assert.assertEquals("10",
-        HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.5"), 2, 0).toString());
-    Assert.assertNull(HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.5"), 1, 0));
-    Assert.assertEquals("9",
-        HiveDecimal.enforcePrecisionScale(HiveDecimal.create("9.4"), 1, 0).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testTrailingZeroRemovalAfterEnforcement() {
-    String decStr = "8.090000000000000000000000000000000000000123456";
-    HiveDecimal dec = HiveDecimal.create(decStr);
-    Assert.assertEquals("8.09", dec.toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testMultiply() {
-    HiveDecimal dec1 = HiveDecimal.create("0.00001786135888657847525803");
-    HiveDecimal dec2 = HiveDecimal.create("3.0000123456789");
-    Assert.assertNull(dec1.multiply(dec2));
-
-    dec1 = HiveDecimal.create("178613588865784752580323232232323444.4");
-    dec2 = HiveDecimal.create("178613588865784752580302323232.3");
-    Assert.assertNull(dec1.multiply(dec2));
-
-    dec1 = HiveDecimal.create("47.324");
-    dec2 = HiveDecimal.create("9232.309");
-    Assert.assertEquals("436909.791116", dec1.multiply(dec2).toString());
-
-    dec1 = HiveDecimal.create("3.140");
-    dec2 = HiveDecimal.create("1.00");
-    Assert.assertEquals("3.14", dec1.multiply(dec2).toString());
-
-    dec1 = HiveDecimal.create("43.010");
-    dec2 = HiveDecimal.create("2");
-    Assert.assertEquals("86.02", dec1.multiply(dec2).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testPow() {
-    HiveDecimal dec = HiveDecimal.create("3.00001415926");
-    Assert.assertEquals(dec.pow(2), dec.multiply(dec));
-
-    HiveDecimal dec1 = HiveDecimal.create("0.000017861358882");
-    dec1 = dec1.pow(3);
-    Assert.assertNull(dec1);
-
-    dec1 = HiveDecimal.create("3.140");
-    Assert.assertEquals("9.8596", dec1.pow(2).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testDivide() {
-    HiveDecimal dec1 = HiveDecimal.create("3.14");
-    HiveDecimal dec2 = HiveDecimal.create("3");
-    Assert.assertNotNull(dec1.divide(dec2));
-
-    dec1 = HiveDecimal.create("15");
-    dec2 = HiveDecimal.create("5");
-    Assert.assertEquals("3", dec1.divide(dec2).toString());
-
-    dec1 = HiveDecimal.create("3.140");
-    dec2 = HiveDecimal.create("1.00");
-    Assert.assertEquals("3.14", dec1.divide(dec2).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testPlus() {
-    HiveDecimal dec1 = HiveDecimal.create("99999999999999999999999999999999999");
-    HiveDecimal dec2 = HiveDecimal.create("1");
-    Assert.assertNotNull(dec1.add(dec2));
-
-    dec1 = HiveDecimal.create("3.140");
-    dec2 = HiveDecimal.create("1.00");
-    Assert.assertEquals("4.14", dec1.add(dec2).toString());
-  }
-
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testSubtract() {
-      HiveDecimal dec1 = HiveDecimal.create("3.140");
-      HiveDecimal dec2 = HiveDecimal.create("1.00");
-      Assert.assertEquals("2.14", dec1.subtract(dec2).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testPosMod() {
-    HiveDecimal hd1 = HiveDecimal.create("-100.91");
-    HiveDecimal hd2 = HiveDecimal.create("9.8");
-    HiveDecimal dec = hd1.remainder(hd2).add(hd2).remainder(hd2);
-    Assert.assertEquals("6.89", dec.toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testHashCode() {
-      Assert.assertEquals(HiveDecimal.create("9").hashCode(), HiveDecimal.create("9.00").hashCode());
-      Assert.assertEquals(HiveDecimal.create("0").hashCode(), HiveDecimal.create("0.00").hashCode());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testException() {
-    HiveDecimal dec = HiveDecimal.create("3.1415.926");
-    Assert.assertNull(dec);
-    dec = HiveDecimal.create("3abc43");
-    Assert.assertNull(dec);
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testBinaryConversion() {
-    testBinaryConversion("0.00");
-    testBinaryConversion("-12.25");
-    testBinaryConversion("234.79");
-  }
-
-  private void testBinaryConversion(String num) {
-    HiveDecimal dec = HiveDecimal.create(num);
-    int scale = 2;
-    byte[] d = dec.setScale(2).unscaledValue().toByteArray();
-    Assert.assertEquals(dec, HiveDecimal.create(new BigInteger(d), scale));
-    int prec = 5;
-    int len =  (int)
-        Math.ceil((Math.log(Math.pow(10, prec) - 1) / Math.log(2) + 1) / 8);
-    byte[] res = new byte[len];
-    if ( dec.signum() == -1)
-      for (int i = 0; i < len; i++)
-        res[i] |= 0xFF;
-    System.arraycopy(d, 0, res, len-d.length, d.length); // Padding leading zeros.
-    Assert.assertEquals(dec, HiveDecimal.create(new BigInteger(res), scale));
-  }
-
-}
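
The byte-length arithmetic in the deleted testBinaryConversion above is terse; here is a standalone sketch of the same round trip with the formula spelled out (logic copied from the test; the sample value -12.25 at scale 2 has unscaled value -1225):

    import java.math.BigInteger;
    import java.util.Arrays;

    public class BinaryConversionSketch {
      public static void main(String[] args) {
        // For precision p the largest magnitude is 10^p - 1, so we need
        // ceil(log2(10^p - 1)) bits plus one sign bit, rounded up to whole bytes.
        int prec = 5;                                          // max magnitude 99999
        int len = (int) Math.ceil(
            (Math.log(Math.pow(10, prec) - 1) / Math.log(2) + 1) / 8);  // = 3 bytes
        byte[] d = new BigInteger("-1225").toByteArray();      // two bytes: FB 37
        byte[] res = new byte[len];
        if (new BigInteger(d).signum() == -1) {
          Arrays.fill(res, (byte) 0xFF);                       // sign-extend negatives
        }
        System.arraycopy(d, 0, res, len - d.length, d.length); // res = FF FB 37
        System.out.println(new BigInteger(res));               // -1225; scale 2 gives -12.25
      }
    }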

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
new file mode 100644
index 0000000..882ff86
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
@@ -0,0 +1,378 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.sql.Timestamp;
+import java.util.Random;
+import java.util.Arrays;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.orc.impl.SerializationUtils;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
+
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+import org.junit.*;
+
+import static org.junit.Assert.*;
+
+public class TestHiveDecimalOrcSerializationUtils extends HiveDecimalTestBase {
+
+  //------------------------------------------------------------------------------------------------
+
+  @Test
+  @Concurrent(count=4)
+  public void testSerializationUtilsWriteRead() {
+    testSerializationUtilsWriteRead("0.00");
+    testSerializationUtilsWriteRead("1");
+    testSerializationUtilsWriteRead("234.79");
+    testSerializationUtilsWriteRead("-12.25");
+    testSerializationUtilsWriteRead("99999999999999999999999999999999");
+    testSerializationUtilsWriteRead("-99999999999999999999999999999999");
+    testSerializationUtilsWriteRead("99999999999999999999999999999999999999");
+    //                               12345678901234567890123456789012345678
+    testSerializationUtilsWriteRead("-99999999999999999999999999999999999999");
+    testSerializationUtilsWriteRead("999999999999.99999999999999999999");
+    testSerializationUtilsWriteRead("-999999.99999999999999999999999999");
+    testSerializationUtilsWriteRead("9999999999999999999999.9999999999999999");
+    testSerializationUtilsWriteRead("-9999999999999999999999999999999.9999999");
+
+    testSerializationUtilsWriteRead("4611686018427387903");  // 2^62 - 1
+    testSerializationUtilsWriteRead("-4611686018427387903");
+    testSerializationUtilsWriteRead("4611686018427387904");  // 2^62
+    testSerializationUtilsWriteRead("-4611686018427387904");
+
+    testSerializationUtilsWriteRead("42535295865117307932921825928971026431");  // 2^62*2^63 - 1
+    testSerializationUtilsWriteRead("-42535295865117307932921825928971026431");
+    testSerializationUtilsWriteRead("42535295865117307932921825928971026432");  // 2^62*2^63
+    testSerializationUtilsWriteRead("-42535295865117307932921825928971026432");
+
+    testSerializationUtilsWriteRead("54216721532321902598.70");
+    testSerializationUtilsWriteRead("-906.62545207002374150309544832320");
+  }
+
+  private void testSerializationUtilsWriteRead(String string) {
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ START ~~~~~~~~~~~~~~~~~");
+
+    HiveDecimal dec = HiveDecimal.create(string);
+    assertTrue(dec != null);
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER dec " + dec.toString());
+
+    BigInteger bigInteger = dec.unscaledValue();
+    int scale = dec.scale();
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER bigInteger " + bigInteger.toString());
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER scale " + scale);
+
+    //---------------------------------------------------
+    HiveDecimalV1 oldDec = HiveDecimalV1.create(string);
+    assertTrue(oldDec != null);
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER oldDec " + oldDec.toString());
+
+    BigInteger oldBigInteger = oldDec.unscaledValue();
+    int oldScale = oldDec.scale();
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER oldBigInteger " + oldBigInteger.toString());
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER oldScale " + oldScale);
+    //---------------------------------------------------
+
+    long[] scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
+
+    int which = 0;
+    try {
+      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+      if (!dec.serializationUtilsWrite(
+          outputStream, scratchLongs)) {
+        // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER serializationUtilsWrite failed");
+        fail();
+      }
+      byte[] bytes = outputStream.toByteArray();
+  
+      ByteArrayOutputStream outputStreamExpected = new ByteArrayOutputStream();
+      SerializationUtils.writeBigInteger(outputStreamExpected, bigInteger);
+      byte[] bytesExpected = outputStreamExpected.toByteArray();
+  
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER check streams");
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER bytes1        " + displayBytes(bytes, 0, bytes.length));
+      if (!StringExpr.equal(bytes, 0, bytes.length, bytesExpected, 0, bytesExpected.length)) {
+        // Trailing-zero differences are ok.
+        // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER streams not equal");
+        // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER bytesExpected " + displayBytes(bytesExpected, 0, bytesExpected.length));
+      }
+      // Deserialize and check...
+      which = 1;
+      ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
+      BigInteger deserializedBigInteger = SerializationUtils.readBigInteger(byteArrayInputStream);
+
+      which = 2;
+      ByteArrayInputStream byteArrayInputStreamExpected = new ByteArrayInputStream(bytesExpected);
+      BigInteger deserializedBigIntegerExpected = SerializationUtils.readBigInteger(byteArrayInputStreamExpected);
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER deserialized equals " +
+      //    deserializedBigInteger.equals(deserializedBigIntegerExpected));
+      if (!deserializedBigInteger.equals(deserializedBigIntegerExpected)) {
+        // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER deserializedBigInteger " + deserializedBigInteger.toString() +
+        //    " deserializedBigIntegerExpected " + deserializedBigIntegerExpected.toString());
+        fail();
+      }
+
+      which = 3;
+      ByteArrayInputStream byteArrayInputStreamRead = new ByteArrayInputStream(bytes);
+      byte[] scratchBytes = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_SERIALIZATION_UTILS_READ];
+      HiveDecimal readHiveDecimal =
+          HiveDecimal.serializationUtilsRead(byteArrayInputStreamRead, scale, scratchBytes);
+      assertTrue(readHiveDecimal != null);
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER read readHiveDecimal " + readHiveDecimal.toString() +
+      //    " dec " + dec.toString() + " (scale parameter " + scale + ")");
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER read toString equals " +
+      //    readHiveDecimal.toString().equals(dec.toString()));
+      assertEquals(readHiveDecimal.toString(), dec.toString());
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER read equals " +
+      //    readHiveDecimal.equals(dec));
+      assertEquals(readHiveDecimal, dec);
+    } catch (IOException e) {
+      // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER " + e + " which " + which);
+      fail();
+    }
+    // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~  END  ~~~~~~~~~~~~~~~~~");
+
+  }
+
+  //------------------------------------------------------------------------------------------------
+
+  @Test
+  public void testRandomSerializationUtilsRead()
+      throws IOException {
+    doTestRandomSerializationUtilsRead(standardAlphabet);
+  }
+
+  @Test
+  public void testRandomSerializationUtilsReadSparse()
+      throws IOException {
+    for (String digitAlphabet : sparseAlphabets) {
+      doTestRandomSerializationUtilsRead(digitAlphabet);
+    }
+  }
+
+  private void doTestRandomSerializationUtilsRead(String digitAlphabet)
+      throws IOException {
+
+    Random r = new Random(2389);
+    for (int i = 0; i < POUND_FACTOR; i++) {
+      BigInteger bigInteger = randHiveBigInteger(r, digitAlphabet);
+
+      doTestSerializationUtilsRead(r, bigInteger);
+    }
+  }
+
+  @Test
+  public void testSerializationUtilsReadSpecial()
+      throws IOException {
+    Random r = new Random(9923);
+    for (BigDecimal bigDecimal : specialBigDecimals) {
+      doTestSerializationUtilsRead(r, bigDecimal.unscaledValue());
+    }
+  }
+
+  private void doTestSerializationUtilsRead(Random r, BigInteger bigInteger)
+     throws IOException {
+
+    // System.out.println("TEST_SERIALIZATION_UTILS_READ bigInteger " + bigInteger);
+
+    HiveDecimalV1 oldDec = HiveDecimalV1.create(bigInteger);
+    if (oldDec != null && isTenPowerBug(oldDec.toString())) {
+      return;
+    }
+    HiveDecimal dec = HiveDecimal.create(bigInteger);
+    if (oldDec == null) {
+      assertTrue(dec == null);
+      return;
+    }
+    assertTrue(dec != null);
+    dec.validate();
+    // System.out.println("TEST_SERIALIZATION_UTILS_READ oldDec " + oldDec);
+    // System.out.println("TEST_SERIALIZATION_UTILS_READ dec " + dec);
+
+    Assert.assertEquals(bigInteger, oldDec.unscaledValue());
+    Assert.assertEquals(bigInteger, dec.unscaledValue());
+
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    SerializationUtils.writeBigInteger(outputStream, bigInteger);
+    byte[] bytes = outputStream.toByteArray();
+
+    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
+    BigInteger deserializedBigInteger =
+        SerializationUtils.readBigInteger(byteArrayInputStream);
+
+    // Verify SerializationUtils first.
+    Assert.assertEquals(bigInteger, deserializedBigInteger);
+
+    // Now HiveDecimal
+    byte[] scratchBytes = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_SERIALIZATION_UTILS_READ];
+
+    byteArrayInputStream = new ByteArrayInputStream(bytes);
+    HiveDecimal resultDec =
+        dec.serializationUtilsRead(
+            byteArrayInputStream, dec.scale(),
+            scratchBytes);
+    assertTrue(resultDec != null);
+    resultDec.validate();
+
+    Assert.assertEquals(dec.toString(), resultDec.toString());
+
+    //----------------------------------------------------------------------------------------------
+
+    // Add scale.
+
+    int scale = 0 + r.nextInt(38 + 1);
+    BigDecimal bigDecimal = new BigDecimal(bigInteger, scale);
+
+    oldDec = HiveDecimalV1.create(bigDecimal);
+    dec = HiveDecimal.create(bigDecimal);
+    if (oldDec == null) {
+      assertTrue(dec == null);
+      return;
+    }
+    assertTrue(dec != null);
+    dec.validate();
+    // System.out.println("TEST_SERIALIZATION_UTILS_READ with scale oldDec " + oldDec);
+    // System.out.println("TEST_SERIALIZATION_UTILS_READ with scale dec " + dec);
+
+    outputStream = new ByteArrayOutputStream();
+    SerializationUtils.writeBigInteger(outputStream, dec.unscaledValue());
+    bytes = outputStream.toByteArray();
+
+    // Now HiveDecimal
+    byteArrayInputStream = new ByteArrayInputStream(bytes);
+    resultDec =
+        dec.serializationUtilsRead(
+            byteArrayInputStream, dec.scale(),
+            scratchBytes);
+    assertTrue(resultDec != null);
+    resultDec.validate();
+
+    Assert.assertEquals(dec.toString(), resultDec.toString());
+  }
+
+  //------------------------------------------------------------------------------------------------
+
+  @Test
+  public void testRandomSerializationUtilsWrite()
+      throws IOException {
+    doTestRandomSerializationUtilsWrite(standardAlphabet, false);
+  }
+
+  @Test
+  public void testRandomSerializationUtilsWriteFractionsOnly()
+      throws IOException {
+    doTestRandomSerializationUtilsWrite(standardAlphabet, true);
+  }
+
+  @Test
+  public void testRandomSerializationUtilsWriteSparse()
+      throws IOException {
+    for (String digitAlphabet : sparseAlphabets) {
+      doTestRandomSerializationUtilsWrite(digitAlphabet, false);
+    }
+  }
+
+  private void doTestRandomSerializationUtilsWrite(String digitAlphabet, boolean fractionsOnly)
+      throws IOException {
+
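+    // NOTE: fractionsOnly is currently unused; this write round trip exercises only
+    // the unscaled BigInteger, so the fractions distinction does not apply here.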
+    Random r = new Random(823);
+    for (int i = 0; i < POUND_FACTOR; i++) {
+      BigInteger bigInteger = randHiveBigInteger(r, digitAlphabet);
+
+      doTestSerializationUtilsWrite(r, bigInteger);
+    }
+  }
+
+  @Test
+  public void testSerializationUtilsWriteSpecial()
+      throws IOException {
+    Random r = new Random(998737);
+    for (BigDecimal bigDecimal : specialBigDecimals) {
+      doTestSerializationUtilsWrite(r, bigDecimal.unscaledValue());
+    }
+  }
+
+  private void doTestSerializationUtilsWrite(Random r, BigInteger bigInteger)
+     throws IOException {
+
+    // System.out.println("TEST_SERIALIZATION_UTILS_WRITE bigInteger " + bigInteger);
+
+    HiveDecimalV1 oldDec = HiveDecimalV1.create(bigInteger);
+    if (oldDec != null && isTenPowerBug(oldDec.toString())) {
+      return;
+    }
+    HiveDecimal dec = HiveDecimal.create(bigInteger);
+    if (oldDec == null) {
+      assertTrue(dec == null);
+      return;
+    }
+    assertTrue(dec != null);
+    dec.validate();
+    // System.out.println("TEST_SERIALIZATION_UTILS_WRITE oldDec " + oldDec);
+    // System.out.println("TEST_SERIALIZATION_UTILS_WRITE dec " + dec);
+
+    Assert.assertEquals(bigInteger, oldDec.unscaledValue());
+    Assert.assertEquals(bigInteger, dec.unscaledValue());
+
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    SerializationUtils.writeBigInteger(outputStream, bigInteger);
+    byte[] bytes = outputStream.toByteArray();
+
+    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
+    BigInteger deserializedBigInteger =
+        SerializationUtils.readBigInteger(byteArrayInputStream);
+
+    // Verify SerializationUtils first.
+    Assert.assertEquals(bigInteger, deserializedBigInteger);
+
+    ByteArrayOutputStream decOutputStream = new ByteArrayOutputStream();
+
+    long[] scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
+
+    boolean successful =
+        dec.serializationUtilsWrite(
+            decOutputStream, scratchLongs);
+    Assert.assertTrue(successful);
+    byte[] decBytes = decOutputStream.toByteArray();
+
+    if (!StringExpr.equal(bytes, 0, bytes.length, decBytes, 0, decBytes.length)) {
+      // Trailing-zero differences are ok...
+      // System.out.println("TEST_SERIALIZATION_UTILS_WRITE streams not equal");
+      // System.out.println("TEST_SERIALIZATION_UTILS_WRITE bytes " + displayBytes(bytes, 0, bytes.length));
+      // System.out.println("TEST_SERIALIZATION_UTILS_WRITE decBytes " + displayBytes(decBytes, 0, decBytes.length));
+    }
+
+    ByteArrayInputStream decByteArrayInputStream = new ByteArrayInputStream(decBytes);
+    BigInteger decDeserializedBigInteger =
+        SerializationUtils.readBigInteger(decByteArrayInputStream);
+
+    Assert.assertEquals(bigInteger, decDeserializedBigInteger);
+  }
+}
\ No newline at end of file
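
Distilled, the round trip these tests exercise looks like the following sketch (method and constant names are the HiveDecimal APIs used above; both calls operate on ORC SerializationUtils' BigInteger wire format):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class OrcDecimalRoundTrip {
      public static void main(String[] args) throws IOException {
        HiveDecimal dec = HiveDecimal.create("-906.62545207002374150309544832320");
        long[] scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Writes the unscaled value in SerializationUtils' BigInteger format;
        // false means the value could not be represented.
        if (!dec.serializationUtilsWrite(out, scratchLongs)) {
          throw new IllegalStateException("value not representable");
        }
        byte[] scratchBytes = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_SERIALIZATION_UTILS_READ];
        HiveDecimal back = HiveDecimal.serializationUtilsRead(
            new ByteArrayInputStream(out.toByteArray()), dec.scale(), scratchBytes);
        System.out.println(back.equals(dec));  // true; toString() also matches
      }
    }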

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/orc/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java b/orc/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
index 745ed9a..1118c5c 100644
--- a/orc/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
+++ b/orc/src/java/org/apache/orc/impl/ColumnStatisticsImpl.java
@@ -20,6 +20,7 @@ package org.apache.orc.impl;
 import java.sql.Date;
 import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -573,9 +574,11 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
 
   private static final class DecimalStatisticsImpl extends ColumnStatisticsImpl
       implements DecimalColumnStatistics {
-    private HiveDecimal minimum = null;
-    private HiveDecimal maximum = null;
-    private HiveDecimal sum = HiveDecimal.ZERO;
+
+    // These objects are mutable for better performance.
+    private HiveDecimalWritable minimum = null;
+    private HiveDecimalWritable maximum = null;
+    private HiveDecimalWritable sum = new HiveDecimalWritable(0);
 
     DecimalStatisticsImpl() {
     }
@@ -584,13 +587,13 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
       super(stats);
       OrcProto.DecimalStatistics dec = stats.getDecimalStatistics();
       if (dec.hasMaximum()) {
-        maximum = HiveDecimal.create(dec.getMaximum());
+        maximum = new HiveDecimalWritable(dec.getMaximum());
       }
       if (dec.hasMinimum()) {
-        minimum = HiveDecimal.create(dec.getMinimum());
+        minimum = new HiveDecimalWritable(dec.getMinimum());
       }
       if (dec.hasSum()) {
-        sum = HiveDecimal.create(dec.getSum());
+        sum = new HiveDecimalWritable(dec.getSum());
       } else {
         sum = null;
       }
@@ -601,21 +604,21 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
       super.reset();
       minimum = null;
       maximum = null;
-      sum = HiveDecimal.ZERO;
+      sum = new HiveDecimalWritable(0);
     }
 
     @Override
-    public void updateDecimal(HiveDecimal value) {
+    public void updateDecimal(HiveDecimalWritable value) {
       if (minimum == null) {
-        minimum = value;
-        maximum = value;
+        minimum = new HiveDecimalWritable(value);
+        maximum = new HiveDecimalWritable(value);
       } else if (minimum.compareTo(value) > 0) {
-        minimum = value;
+        minimum.set(value);
       } else if (maximum.compareTo(value) < 0) {
-        maximum = value;
+        maximum.set(value);
       }
       if (sum != null) {
-        sum = sum.add(value);
+        sum.mutateAdd(value);
       }
     }
 
@@ -624,20 +627,20 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
       if (other instanceof DecimalStatisticsImpl) {
         DecimalStatisticsImpl dec = (DecimalStatisticsImpl) other;
         if (minimum == null) {
-          minimum = dec.minimum;
-          maximum = dec.maximum;
+          minimum = (dec.minimum != null ? new HiveDecimalWritable(dec.minimum) : null);
+          maximum = (dec.maximum != null ? new HiveDecimalWritable(dec.maximum) : null);
           sum = dec.sum;
         } else if (dec.minimum != null) {
           if (minimum.compareTo(dec.minimum) > 0) {
-            minimum = dec.minimum;
+            minimum.set(dec.minimum);
           }
           if (maximum.compareTo(dec.maximum) < 0) {
-            maximum = dec.maximum;
+            maximum.set(dec.maximum);
           }
           if (sum == null || dec.sum == null) {
             sum = null;
           } else {
-            sum = sum.add(dec.sum);
+            sum.mutateAdd(dec.sum);
           }
         }
       } else {
@@ -657,7 +660,8 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
         dec.setMinimum(minimum.toString());
         dec.setMaximum(maximum.toString());
       }
-      if (sum != null) {
+      // Check isSet for overflow.
+      if (sum != null && sum.isSet()) {
         dec.setSum(sum.toString());
       }
       result.setDecimalStatistics(dec);
@@ -666,17 +670,17 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
 
     @Override
     public HiveDecimal getMinimum() {
-      return minimum;
+      return (minimum == null) ? null : minimum.getHiveDecimal();
     }
 
     @Override
     public HiveDecimal getMaximum() {
-      return maximum;
+      return (maximum == null) ? null : maximum.getHiveDecimal();
     }
 
     @Override
     public HiveDecimal getSum() {
-      return sum;
+      return (sum == null) ? null : sum.getHiveDecimal();
     }
 
     @Override
@@ -987,7 +991,7 @@ public class ColumnStatisticsImpl implements ColumnStatistics {
     throw new UnsupportedOperationException("Can't update string");
   }
 
-  public void updateDecimal(HiveDecimal value) {
+  public void updateDecimal(HiveDecimalWritable value) {
     throw new UnsupportedOperationException("Can't update decimal");
   }
 

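The statistics rewrite above swaps the immutable HiveDecimal min/max/sum fields for mutable HiveDecimalWritable ones so per-row updates stop allocating. A condensed sketch of the aggregation pattern (names as used in the hunk; per the serialize() change, isSet() going false signals overflow):

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalStatsSketch {
      private HiveDecimalWritable minimum = null;
      private HiveDecimalWritable maximum = null;
      private HiveDecimalWritable sum = new HiveDecimalWritable(0);

      void updateDecimal(HiveDecimalWritable value) {
        if (minimum == null) {              // first value: copy, never alias the argument
          minimum = new HiveDecimalWritable(value);
          maximum = new HiveDecimalWritable(value);
        } else if (minimum.compareTo(value) > 0) {
          minimum.set(value);               // overwrite in place, no allocation
        } else if (maximum.compareTo(value) < 0) {
          maximum.set(value);
        }
        if (sum != null) {
          sum.mutateAdd(value);             // in-place add; overflow clears isSet()
        }
      }
    }
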
http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
index 5d5f991..e60075f 100644
--- a/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
+++ b/orc/src/java/org/apache/orc/impl/ConvertTreeReaderFactory.java
@@ -608,19 +608,55 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
       setConvertTreeReader(decimalTreeReader);
     }
 
-    private static HiveDecimal DECIMAL_MAX_LONG = HiveDecimal.create(Long.MAX_VALUE);
-    private static HiveDecimal DECIMAL_MIN_LONG = HiveDecimal.create(Long.MIN_VALUE);
-
     @Override
     public void setConvertVectorElement(int elementNum) throws IOException {
-      HiveDecimal decimalValue = decimalColVector.vector[elementNum].getHiveDecimal();
-      if (decimalValue.compareTo(DECIMAL_MAX_LONG) > 0 ||
-          decimalValue.compareTo(DECIMAL_MIN_LONG) < 0) {
+      HiveDecimalWritable decWritable = decimalColVector.vector[elementNum];
+      long[] vector = longColVector.vector;
+      Category readerCategory = readerType.getCategory();
+
+      // Check to see if the decimal will fit in the Hive integer data type.
+      // If not, set the element to null.
+      boolean isInRange;
+      switch (readerCategory) {
+      case BOOLEAN:
+        // No data loss for boolean.
+        vector[elementNum] = decWritable.signum() == 0 ? 0 : 1;
+        return;
+      case BYTE:
+        isInRange = decWritable.isByte();
+        break;
+      case SHORT:
+        isInRange = decWritable.isShort();
+        break;
+      case INT:
+        isInRange = decWritable.isInt();
+        break;
+      case LONG:
+        isInRange = decWritable.isLong();
+        break;
+      default:
+        throw new RuntimeException("Unexpected type kind " + readerCategory.name());
+      }
+      if (!isInRange) {
         longColVector.isNull[elementNum] = true;
         longColVector.noNulls = false;
-      } else {
-        // TODO: lossy conversion!
-        downCastAnyInteger(longColVector, elementNum, decimalValue.longValue(), readerType);
+        return;
+      }
+      switch (readerCategory) {
+      case BYTE:
+        vector[elementNum] = decWritable.byteValue();
+        break;
+      case SHORT:
+        vector[elementNum] = decWritable.shortValue();
+        break;
+      case INT:
+        vector[elementNum] = decWritable.intValue();
+        break;
+      case LONG:
+        vector[elementNum] = decWritable.longValue();
+        break;
+      default:
+        throw new RuntimeException("Unexpected type kind " + readerCategory.name());
       }
     }
 
@@ -828,7 +864,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
     @Override
     public void setConvertVectorElement(int elementNum) throws IOException {
       doubleColVector.vector[elementNum] =
-          (float) decimalColVector.vector[elementNum].getHiveDecimal().doubleValue();
+          (float) decimalColVector.vector[elementNum].doubleValue();
     }
 
     @Override
@@ -1034,7 +1070,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
     @Override
     public void setConvertVectorElement(int elementNum) throws IOException {
       doubleColVector.vector[elementNum] =
-          decimalColVector.vector[elementNum].getHiveDecimal().doubleValue();
+          decimalColVector.vector[elementNum].doubleValue();
     }
 
     @Override
@@ -1371,14 +1407,8 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
     @Override
     public void setConvertVectorElement(int elementNum) throws IOException {
 
-      HiveDecimalWritable valueWritable = HiveDecimalWritable.enforcePrecisionScale(
-          fileDecimalColVector.vector[elementNum], readerPrecision, readerScale);
-      if (valueWritable != null) {
-        decimalColVector.set(elementNum, valueWritable);
-      } else {
-        decimalColVector.noNulls = false;
-        decimalColVector.isNull[elementNum] = true;
-      }
+      decimalColVector.set(elementNum, fileDecimalColVector.vector[elementNum]);
+
     }
 
     @Override
@@ -1540,6 +1570,7 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
     private final TypeDescription readerType;
     private DecimalColumnVector decimalColVector;
     private BytesColumnVector bytesColVector;
+    private byte[] scratchBuffer;
 
     StringGroupFromDecimalTreeReader(int columnId, TypeDescription fileType,
         TypeDescription readerType, boolean skipCorrupt) throws IOException {
@@ -1549,13 +1580,19 @@ public class ConvertTreeReaderFactory extends TreeReaderFactory {
       this.readerType = readerType;
       decimalTreeReader = new DecimalTreeReader(columnId, precision, scale);
       setConvertTreeReader(decimalTreeReader);
+      scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
     }
 
     @Override
     public void setConvertVectorElement(int elementNum) {
-      String string = decimalColVector.vector[elementNum].getHiveDecimal().toString();
-      byte[] bytes = string.getBytes();
-      assignStringGroupVectorEntry(bytesColVector, elementNum, readerType, bytes);
+      HiveDecimalWritable decWritable = decimalColVector.vector[elementNum];
+
+      // Convert the decimal to bytes directly instead of through a String for better performance.
+      final int byteIndex = decWritable.toBytes(scratchBuffer);
+
+      assignStringGroupVectorEntry(
+          bytesColVector, elementNum, readerType,
+          scratchBuffer, byteIndex, HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES - byteIndex);
     }
 
     @Override
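
Two patterns in this reader are worth calling out. First, decimal-to-integer casts now
range-check with isByte()/isShort()/isInt()/isLong() and null out values that do not
fit, rather than silently truncating. Second, decimal-to-string conversion fills a
reusable scratch buffer; judging from the length arithmetic above, toBytes() writes
toward the end of the buffer and returns the index of the first byte used. A small
sketch of both (hypothetical helper methods, under those assumptions):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalConvertSketch {
      // Range-check first, then narrow; return null instead of a wrapped value.
      static Long toLongOrNull(HiveDecimalWritable dec) {
        return dec.isLong() ? Long.valueOf(dec.longValue()) : null;
      }

      // Decimal to bytes without an intermediate String.  toBytes() returns the
      // start index of the digits; the valid bytes run from there to the end of
      // the scratch buffer (sized HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES).
      static byte[] toBytes(HiveDecimalWritable dec, byte[] scratch) {
        int start = dec.toBytes(scratch);
        int len = HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES - start;
        byte[] out = new byte[len];
        System.arraycopy(scratch, start, out, 0, len);
        return out;
      }
    }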

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/orc/src/java/org/apache/orc/impl/TreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/TreeReaderFactory.java b/orc/src/java/org/apache/orc/impl/TreeReaderFactory.java
index 484209b..3ddafba 100644
--- a/orc/src/java/org/apache/orc/impl/TreeReaderFactory.java
+++ b/orc/src/java/org/apache/orc/impl/TreeReaderFactory.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.OrcProto;
 
@@ -1044,6 +1045,7 @@ public class TreeReaderFactory {
     protected InStream valueStream;
     protected IntegerReader scaleReader = null;
     private int[] scratchScaleVector;
+    private byte[] scratchBytes;
 
     private final int precision;
     private final int scale;
@@ -1060,6 +1062,7 @@ public class TreeReaderFactory {
       this.scale = scale;
       this.scratchScaleVector = new int[VectorizedRowBatch.DEFAULT_SIZE];
       this.valueStream = valueStream;
+      this.scratchBytes = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_SERIALIZATION_UTILS_READ];
       if (scaleStream != null && encoding != null) {
         checkEncoding(encoding);
         this.scaleReader = createIntegerReader(encoding.getKind(), scaleStream, true, false);
@@ -1112,18 +1115,30 @@ public class TreeReaderFactory {
       // read the scales
       scaleReader.nextVector(result, scratchScaleVector, batchSize);
       // Read value entries based on isNull entries
+      // Use the fast ORC deserialization method provided by HiveDecimalWritable that
+      // emulates SerializationUtils.readBigInteger.
+      HiveDecimalWritable[] vector = result.vector;
+      HiveDecimalWritable decWritable;
       if (result.noNulls) {
         for (int r=0; r < batchSize; ++r) {
-          BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
-          HiveDecimal dec = HiveDecimal.create(bInt, scratchScaleVector[r]);
-          result.set(r, dec);
+          decWritable = vector[r];
+          if (!decWritable.serializationUtilsRead(
+              valueStream, scratchScaleVector[r],
+              scratchBytes)) {
+            result.isNull[r] = true;
+            result.noNulls = false;
+          }
         }
       } else if (!result.isRepeating || !result.isNull[0]) {
         for (int r=0; r < batchSize; ++r) {
           if (!result.isNull[r]) {
-            BigInteger bInt = SerializationUtils.readBigInteger(valueStream);
-            HiveDecimal dec = HiveDecimal.create(bInt, scratchScaleVector[r]);
-            result.set(r, dec);
+            decWritable = vector[r];
+            if (!decWritable.serializationUtilsRead(
+                valueStream, scratchScaleVector[r],
+                scratchBytes)) {
+              result.isNull[r] = true;
+              result.noNulls = false;
+            }
           }
         }
       }
@@ -1132,8 +1147,9 @@ public class TreeReaderFactory {
     @Override
     void skipRows(long items) throws IOException {
       items = countNonNulls(items);
+      HiveDecimalWritable scratchDecWritable = new HiveDecimalWritable();
       for (int i = 0; i < items; i++) {
-        SerializationUtils.readBigInteger(valueStream);
+        scratchDecWritable.serializationUtilsRead(valueStream, 0, scratchBytes);
       }
       scaleReader.skip(items);
     }
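
The read path now parses directly from the stream into a reusable writable. A sketch of
the calling convention, assuming serializationUtilsRead() accepts any InputStream (the
InStream above is one) and returns false when the encoded value cannot be represented:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalReadSketch {
      // Deserialize one SerializationUtils-encoded value into 'result' without
      // creating a BigInteger or HiveDecimal.  A false return means the caller
      // should mark the row null, as nextVector() does above.
      static boolean readOne(InputStream in, int scale, HiveDecimalWritable result,
          byte[] scratch) throws IOException {
        return result.serializationUtilsRead(in, scale, scratch);
      }
    }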

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/orc/src/java/org/apache/orc/impl/WriterImpl.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/WriterImpl.java b/orc/src/java/org/apache/orc/impl/WriterImpl.java
index b17fb41..518a5f7 100644
--- a/orc/src/java/org/apache/orc/impl/WriterImpl.java
+++ b/orc/src/java/org/apache/orc/impl/WriterImpl.java
@@ -30,6 +30,7 @@ import java.util.Map;
 import java.util.TimeZone;
 import java.util.TreeMap;
 
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.orc.BinaryColumnStatistics;
 import org.apache.orc.BloomFilterIO;
@@ -1610,6 +1611,11 @@ public class WriterImpl implements Writer, MemoryManager.Callback {
 
   private static class DecimalTreeWriter extends TreeWriter {
     private final PositionedOutputStream valueStream;
+
+    // These scratch buffers allow us to serialize decimals much faster.
+    private final long[] scratchLongs;
+    private final byte[] scratchBuffer;
+
     private final IntegerWriter scaleStream;
     private final boolean isDirectV2;
 
@@ -1620,6 +1626,8 @@ public class WriterImpl implements Writer, MemoryManager.Callback {
       super(columnId, schema, writer, nullable);
       this.isDirectV2 = isNewWriteFormat(writer);
       valueStream = writer.createStream(id, OrcProto.Stream.Kind.DATA);
+      scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
+      scratchBuffer = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_TO_BYTES];
       this.scaleStream = createIntegerWriter(writer.createStream(id,
           OrcProto.Stream.Kind.SECONDARY), true, isDirectV2, writer);
       recordPosition(rowIndexPosition);
@@ -1642,27 +1650,36 @@ public class WriterImpl implements Writer, MemoryManager.Callback {
       DecimalColumnVector vec = (DecimalColumnVector) vector;
       if (vector.isRepeating) {
         if (vector.noNulls || !vector.isNull[0]) {
-          HiveDecimal value = vec.vector[0].getHiveDecimal();
+          HiveDecimalWritable value = vec.vector[0];
           indexStatistics.updateDecimal(value);
           if (createBloomFilter) {
-            bloomFilter.addString(value.toString());
+
+            // Use the HiveDecimalWritable toString() method with a scratch buffer for good
+            // performance when creating the String.  We must use a String hash code and not a
+            // UTF-8 byte[] hash code in order to get the right hash code.
+            bloomFilter.addString(value.toString(scratchBuffer));
           }
           for(int i=0; i < length; ++i) {
-            SerializationUtils.writeBigInteger(valueStream,
-                value.unscaledValue());
+
+            // Use the fast ORC serialization method provided by HiveDecimalWritable that
+            // emulates SerializationUtils.writeBigInteger.
+            value.serializationUtilsWrite(
+                valueStream,
+                scratchLongs);
             scaleStream.write(value.scale());
           }
         }
       } else {
         for(int i=0; i < length; ++i) {
           if (vec.noNulls || !vec.isNull[i + offset]) {
-            HiveDecimal value = vec.vector[i + offset].getHiveDecimal();
-            SerializationUtils.writeBigInteger(valueStream,
-                value.unscaledValue());
+            HiveDecimalWritable value = vec.vector[i + offset];
+            value.serializationUtilsWrite(
+                valueStream,
+                scratchLongs);
             scaleStream.write(value.scale());
             indexStatistics.updateDecimal(value);
             if (createBloomFilter) {
-              bloomFilter.addString(value.toString());
+              bloomFilter.addString(value.toString(scratchBuffer));
             }
           }
         }
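
On the write side the same idea applies in reverse: one long[] and one byte[] scratch
area are reused for every value, so neither a BigInteger nor a String is allocated per
row (the bloom-filter String is built from the scratch buffer). A sketch, assuming
serializationUtilsWrite() accepts any OutputStream (the PositionedOutputStream above
is one):

    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalWriteSketch {
      // Write the value in the SerializationUtils.writeBigInteger encoding and
      // return the String used for the bloom filter, built via the scratch buffer.
      static String writeOne(OutputStream out, HiveDecimalWritable value,
          long[] scratchLongs, byte[] scratchBuffer) throws IOException {
        value.serializationUtilsWrite(out, scratchLongs);
        return value.toString(scratchBuffer);
      }
    }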

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/orc/src/test/org/apache/orc/TestColumnStatistics.java
----------------------------------------------------------------------
diff --git a/orc/src/test/org/apache/orc/TestColumnStatistics.java b/orc/src/test/org/apache/orc/TestColumnStatistics.java
index 1837dbb..93d4bdb 100644
--- a/orc/src/test/org/apache/orc/TestColumnStatistics.java
+++ b/orc/src/test/org/apache/orc/TestColumnStatistics.java
@@ -30,6 +30,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
@@ -170,17 +171,17 @@ public class TestColumnStatistics {
 
     ColumnStatisticsImpl stats1 = ColumnStatisticsImpl.create(schema);
     ColumnStatisticsImpl stats2 = ColumnStatisticsImpl.create(schema);
-    stats1.updateDecimal(HiveDecimal.create(10));
-    stats1.updateDecimal(HiveDecimal.create(100));
-    stats2.updateDecimal(HiveDecimal.create(1));
-    stats2.updateDecimal(HiveDecimal.create(1000));
+    stats1.updateDecimal(new HiveDecimalWritable(10));
+    stats1.updateDecimal(new HiveDecimalWritable(100));
+    stats2.updateDecimal(new HiveDecimalWritable(1));
+    stats2.updateDecimal(new HiveDecimalWritable(1000));
     stats1.merge(stats2);
     DecimalColumnStatistics typed = (DecimalColumnStatistics) stats1;
     assertEquals(1, typed.getMinimum().longValue());
     assertEquals(1000, typed.getMaximum().longValue());
     stats1.reset();
-    stats1.updateDecimal(HiveDecimal.create(-10));
-    stats1.updateDecimal(HiveDecimal.create(10000));
+    stats1.updateDecimal(new HiveDecimalWritable(-10));
+    stats1.updateDecimal(new HiveDecimalWritable(10000));
     stats1.merge(stats2);
     assertEquals(-10, typed.getMinimum().longValue());
     assertEquals(10000, typed.getMaximum().longValue());

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
index 9a48171..b532e2f 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
@@ -61,12 +61,11 @@ public class <ClassName> extends VectorAggregateExpression {
       }
 
       public void checkValue(HiveDecimalWritable writable, short scale) {
-        HiveDecimal value = writable.getHiveDecimal();
         if (isNull) {
           isNull = false;
-          this.value.set(value);
-        } else if (this.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
-          this.value.set(value);
+          this.value.set(writable);
+        } else if (this.value.compareTo(writable) <OperatorSymbol> 0) {
+          this.value.set(writable);
         }
       }
 
@@ -321,8 +320,7 @@ public class <ClassName> extends VectorAggregateExpression {
           if (inputVector.noNulls &&
             (myagg.isNull || (myagg.value.compareTo(vector[0]) <OperatorSymbol> 0))) {
             myagg.isNull = false;
-            HiveDecimal value = vector[0].getHiveDecimal();
-            myagg.value.set(value);
+            myagg.value.set(vector[0]);
           }
           return;
         }
@@ -354,13 +352,13 @@ public class <ClassName> extends VectorAggregateExpression {
       for (int j=0; j< batchSize; ++j) {
         int i = selected[j];
         if (!isNull[i]) {
-          HiveDecimal value = vector[i].getHiveDecimal();
+          HiveDecimalWritable writable = vector[i];
           if (myagg.isNull) {
             myagg.isNull = false;
-            myagg.value.set(value);
+            myagg.value.set(writable);
           }
-          else if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
-            myagg.value.set(value);
+          else if (myagg.value.compareTo(writable) <OperatorSymbol> 0) {
+            myagg.value.set(writable);
           }
         }
       }
@@ -374,15 +372,14 @@ public class <ClassName> extends VectorAggregateExpression {
         int[] selected) {
 
       if (myagg.isNull) {
-        HiveDecimal value = vector[selected[0]].getHiveDecimal();
-        myagg.value.set(value);
+        myagg.value.set(vector[selected[0]]);
         myagg.isNull = false;
       }
 
       for (int i=0; i< batchSize; ++i) {
-        HiveDecimal value = vector[selected[i]].getHiveDecimal();
-        if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
-          myagg.value.set(value);
+        HiveDecimalWritable writable = vector[selected[i]];
+        if (myagg.value.compareTo(writable) <OperatorSymbol> 0) {
+          myagg.value.set(writable);
         }
       }
     }
@@ -396,13 +393,13 @@ public class <ClassName> extends VectorAggregateExpression {
 
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
-          HiveDecimal value = vector[i].getHiveDecimal();
+          HiveDecimalWritable writable = vector[i];
           if (myagg.isNull) {
-            myagg.value.set(value);
+            myagg.value.set(writable);
             myagg.isNull = false;
           }
-          else if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
-            myagg.value.set(value);
+          else if (myagg.value.compareTo(writable) <OperatorSymbol> 0) {
+            myagg.value.set(writable);
           }
         }
       }
@@ -414,15 +411,14 @@ public class <ClassName> extends VectorAggregateExpression {
         short scale,
         int batchSize) {
       if (myagg.isNull) {
-        HiveDecimal value = vector[0].getHiveDecimal();
-        myagg.value.set(value);
+        myagg.value.set(vector[0]);
         myagg.isNull = false;
       }
 
       for (int i=0;i<batchSize;++i) {
-        HiveDecimal value = vector[i].getHiveDecimal();
-        if (myagg.value.getHiveDecimal().compareTo(value) <OperatorSymbol> 0) {
-          myagg.value.set(value);
+        HiveDecimalWritable writable = vector[i];
+        if (myagg.value.compareTo(writable) <OperatorSymbol> 0) {
+          myagg.value.set(writable);
         }
       }
     }
@@ -472,4 +468,3 @@ public class <ClassName> extends VectorAggregateExpression {
     this.inputExpression = inputExpression;
   }
 }
-
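
After template expansion, the generated inner loop compares and copies writables
directly, so no HiveDecimal is materialized per row. A sketch of the max variant,
assuming <OperatorSymbol> expands to '<' for max (and '>' for min):

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalMaxSketch {
      // Equivalent of the no-nulls, no-selection loop above, specialized for max:
      // replace the aggregate whenever it compares smaller than the row value.
      static void updateMax(HiveDecimalWritable agg, HiveDecimalWritable[] vector,
          int batchSize) {
        for (int i = 0; i < batchSize; ++i) {
          if (agg.compareTo(vector[i]) < 0) {
            agg.set(vector[i]);   // field copy, no allocation
          }
        }
      }
    }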

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index d31d338..fc82cf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -534,8 +534,9 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
           }
           break;
         case DECIMAL:
+          // The DecimalColumnVector set method copies the deserialized decimal writable's fields directly.
           ((DecimalColumnVector) batch.cols[projectionColumnNum]).set(
-              batchIndex, deserializeRead.currentHiveDecimalWritable.getHiveDecimal());
+              batchIndex, deserializeRead.currentHiveDecimalWritable);
           break;
         case INTERVAL_YEAR_MONTH:
           ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
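
Note that the earlier ConvertTreeReaderFactory hunk dropped its explicit
HiveDecimalWritable.enforcePrecisionScale() call in favor of the same
DecimalColumnVector.set() overload used here, which suggests the overload both copies
the writable's fields and enforces the column's precision/scale. A sketch under that
assumption (the helper is hypothetical):

    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalColumnFillSketch {
      // Fill one batch slot straight from a reusable deserialization writable.
      static void fill(DecimalColumnVector col, int batchIndex,
          HiveDecimalWritable dec) {
        col.set(batchIndex, dec);   // copies fields; precision/scale enforced by the vector
      }
    }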

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index e6dc9ec..631dcb2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -310,8 +310,9 @@ public class VectorExtractRow {
             return primitiveWritable;
           }
         case DECIMAL:
+          // The HiveDecimalWritable set method copies the source decimal writable's fields directly.
           ((HiveDecimalWritable) primitiveWritable).set(
-              ((DecimalColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex].getHiveDecimal());
+              ((DecimalColumnVector) batch.cols[projectionColumnNum]).vector[adjustedIndex]);
           return primitiveWritable;
         case INTERVAL_YEAR_MONTH:
           ((HiveIntervalYearMonthWritable) primitiveWritable).set(
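
The extract side is the mirror image: a writable-to-writable copy, where the old code
materialized a HiveDecimal via getHiveDecimal() only to load it back into a writable.
A minimal sketch:

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalCopySketch {
      public static void main(String[] args) {
        HiveDecimalWritable source = new HiveDecimalWritable("123.45");
        HiveDecimalWritable target = new HiveDecimalWritable();
        target.set(source);   // direct field copy, no intermediate HiveDecimal
        System.out.println(target);
      }
    }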

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index 8a101a6..2bd1850 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -109,7 +109,8 @@ public class VectorHashKeyWrapper extends KeyWrapper {
         Arrays.hashCode(isNull);
 
     for (int i = 0; i < decimalValues.length; i++) {
-      hashcode ^= decimalValues[i].getHiveDecimal().hashCode();
+      // Use the new, faster hash code since we are hashing in-memory objects.
+      hashcode ^= decimalValues[i].newFasterHashCode();
     }
 
     for (int i = 0; i < timestampValues.length; i++) {
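
A note on the hash change: per the comment above, newFasterHashCode() hashes the
writable's in-memory representation, so it suits process-local hash tables like this
key wrapper, but it presumably differs from the BigDecimal-based hashCode() and should
not be compared against hashes computed elsewhere. A sketch of the combining pattern,
under that assumption:

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalKeyHashSketch {
      // XOR-combine per-key decimal hashes the way the wrapper above does.
      static int hashDecimalKeys(int seed, HiveDecimalWritable[] decimalValues) {
        int hashcode = seed;
        for (int i = 0; i < decimalValues.length; i++) {
          hashcode ^= decimalValues[i].newFasterHashCode();
        }
        return hashcode;
      }
    }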

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
index bfd26ae..b4708b5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
@@ -773,7 +773,7 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
     } else if (klh.decimalIndex >= 0) {
       return kw.getIsDecimalNull(klh.decimalIndex)? null :
           keyOutputWriter.writeValue(
-                kw.getDecimal(klh.decimalIndex).getHiveDecimal());
+                kw.getDecimal(klh.decimalIndex));
     } else if (klh.timestampIndex >= 0) {
       return kw.getIsTimestampNull(klh.timestampIndex)? null :
           keyOutputWriter.writeValue(

http://git-wip-us.apache.org/repos/asf/hive/blob/4ba713cc/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index 6af3d99..a95098a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -497,7 +497,10 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
 
       if (colVector.isRepeating) {
         if (colVector.noNulls || !colVector.isNull[0]) {
-          serializeWrite.writeHiveDecimal(colVector.vector[0].getHiveDecimal(), colVector.scale);
+          // We serialize the HiveDecimalWritable along with the desired serialization
+          // scale, which text serialization uses when adding trailing fractional
+          // zeroes.
+          serializeWrite.writeHiveDecimal(colVector.vector[0], colVector.scale);
           return true;
         } else {
           serializeWrite.writeNull();
@@ -505,7 +508,7 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
         }
       } else {
         if (colVector.noNulls || !colVector.isNull[batchIndex]) {
-          serializeWrite.writeHiveDecimal(colVector.vector[batchIndex].getHiveDecimal(), colVector.scale);
+          serializeWrite.writeHiveDecimal(colVector.vector[batchIndex], colVector.scale);
           return true;
         } else {
           serializeWrite.writeNull();