Posted to commits@hive.apache.org by br...@apache.org on 2014/11/07 21:41:45 UTC

svn commit: r1637444 [8/20] - in /hive/branches/spark: ./ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/conf/ com...

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java Fri Nov  7 20:41:34 2014
@@ -19,7 +19,8 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import junit.framework.Assert;
-import org.apache.hadoop.hive.common.type.Decimal128;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.junit.Test;
@@ -32,183 +33,198 @@ public class TestDecimalUtil {
   @Test
   public void testFloor() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 13);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
-    Decimal128 expected1 = new Decimal128(19, (short)0);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
+    HiveDecimal expected1 = HiveDecimal.create("19");
     DecimalUtil.floor(0, d1, dcv);
-    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(23.0, (short) 5);
-    Decimal128 expected2 = new Decimal128(23, (short)0);
+    HiveDecimal d2 = HiveDecimal.create("23.00000");
+    Assert.assertEquals(5, d2.scale());
+    HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.floor(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d3 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected3 = new Decimal128(-26, (short)0);
+    HiveDecimal d3 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected3 = HiveDecimal.create("-26");
     DecimalUtil.floor(0, d3, dcv);
-    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d4 = new Decimal128(-17, (short) 5);
-    Decimal128 expected4 = new Decimal128(-17, (short)0);
+    HiveDecimal d4 = HiveDecimal.create("-17.00000");
+    Assert.assertEquals(5, d4.scale());
+    HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.floor(0, d4, dcv);
-    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d5 = new Decimal128(-0.3, (short) 5);
-    Decimal128 expected5 = new Decimal128(-1, (short)0);
+    HiveDecimal d5 = HiveDecimal.create("-0.30000");
+    Assert.assertEquals(5, d5.scale());
+    HiveDecimal expected5 = HiveDecimal.create("-1");
     DecimalUtil.floor(0, d5, dcv);
-    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d6 = new Decimal128(0.3, (short) 5);
-    Decimal128 expected6 = new Decimal128(0, (short)0);
+    HiveDecimal d6 = HiveDecimal.create("0.30000");
+    Assert.assertEquals(5, d6.scale());
+    HiveDecimal expected6 = HiveDecimal.create("0");
     DecimalUtil.floor(0, d6, dcv);
-    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testCeiling() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 13);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
-    Decimal128 expected1 = new Decimal128(20, (short)0);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
+    HiveDecimal expected1 = HiveDecimal.create("20");
     DecimalUtil.ceiling(0, d1, dcv);
-    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(23.0, (short) 5);
-    Decimal128 expected2 = new Decimal128(23, (short)0);
+    HiveDecimal d2 = HiveDecimal.create("23.00000");
+    Assert.assertEquals(5, d2.scale());
+    HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.ceiling(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d3 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected3 = new Decimal128(-25, (short)0);
+    HiveDecimal d3 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected3 = HiveDecimal.create("-25");
     DecimalUtil.ceiling(0, d3, dcv);
-    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d4 = new Decimal128(-17, (short) 5);
-    Decimal128 expected4 = new Decimal128(-17, (short)0);
+    HiveDecimal d4 = HiveDecimal.create("-17.00000");
+    Assert.assertEquals(5, d4.scale());
+    HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.ceiling(0, d4, dcv);
-    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d5 = new Decimal128(-0.3, (short) 5);
-    Decimal128 expected5 = new Decimal128(0, (short)0);
+    HiveDecimal d5 = HiveDecimal.create("-0.30000");
+    Assert.assertEquals(5, d5.scale());
+    HiveDecimal expected5 = HiveDecimal.create("0");
     DecimalUtil.ceiling(0, d5, dcv);
-    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d6 = new Decimal128(0.3, (short) 5);
-    Decimal128 expected6 = new Decimal128(1, (short)0);
+    HiveDecimal d6 = HiveDecimal.create("0.30000");
+    Assert.assertEquals(5, d6.scale());
+    HiveDecimal expected6 = HiveDecimal.create("1");
     DecimalUtil.ceiling(0, d6, dcv);
-    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testAbs() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 13);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
     DecimalUtil.abs(0, d1, dcv);
-    Assert.assertEquals(0, d1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, d1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected2 = new Decimal128(25.34567, (short)5);
+    HiveDecimal d2 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected2 = HiveDecimal.create("25.34567");
     DecimalUtil.abs(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testRound() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 0);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
-    Decimal128 expected1 = new Decimal128(20, (short)0);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
+    HiveDecimal expected1 = HiveDecimal.create("20");
     DecimalUtil.round(0, d1, dcv);
-    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(23.0, (short) 5);
-    Decimal128 expected2 = new Decimal128(23, (short)0);
+    HiveDecimal d2 = HiveDecimal.create("23.00000");
+    Assert.assertEquals(5, d2.scale());
+    HiveDecimal expected2 = HiveDecimal.create("23");
     DecimalUtil.round(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d3 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected3 = new Decimal128(-25, (short)0);
+    HiveDecimal d3 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected3 = HiveDecimal.create("-25");
     DecimalUtil.round(0, d3, dcv);
-    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d4 = new Decimal128(-17, (short) 5);
-    Decimal128 expected4 = new Decimal128(-17, (short)0);
+    HiveDecimal d4 = HiveDecimal.create("-17.00000");
+    Assert.assertEquals(5, d4.scale());
+    HiveDecimal expected4 = HiveDecimal.create("-17");
     DecimalUtil.round(0, d4, dcv);
-    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d5 = new Decimal128(19.36778, (short) 5);
-    Decimal128 expected5 = new Decimal128(19, (short)0);
+    HiveDecimal d5 = HiveDecimal.create("19.36778");
+    HiveDecimal expected5 = HiveDecimal.create("19");
     DecimalUtil.round(0, d5, dcv);
-    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d6 = new Decimal128(-25.54567, (short) 5);
-    Decimal128 expected6 = new Decimal128(-26, (short)0);
+    HiveDecimal d6 = HiveDecimal.create("-25.54567");
+    HiveDecimal expected6 = HiveDecimal.create("-26");
     DecimalUtil.round(0, d6, dcv);
-    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testRoundWithDigits() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 3);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
-    Decimal128 expected1 = new Decimal128(19.568, (short)3);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
+    HiveDecimal expected1 = HiveDecimal.create("19.568");
     DecimalUtil.round(0, d1, dcv);
-    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(23.567, (short) 5);
-    Decimal128 expected2 = new Decimal128(23.567, (short)3);
+    HiveDecimal d2 = HiveDecimal.create("23.56700");
+    Assert.assertEquals(5, d2.scale());
+    HiveDecimal expected2 = HiveDecimal.create("23.567");
     DecimalUtil.round(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d3 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected3 = new Decimal128(-25.346, (short)3);
+    HiveDecimal d3 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected3 = HiveDecimal.create("-25.346");
     DecimalUtil.round(0, d3, dcv);
-    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d4 = new Decimal128(-17.234, (short) 5);
-    Decimal128 expected4 = new Decimal128(-17.234, (short)3);
+    HiveDecimal d4 = HiveDecimal.create("-17.23400");
+    Assert.assertEquals(5, d4.scale());
+    HiveDecimal expected4 = HiveDecimal.create("-17.234");
     DecimalUtil.round(0, d4, dcv);
-    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d5 = new Decimal128(19.36748, (short) 5);
-    Decimal128 expected5 = new Decimal128(19.367, (short)3);
+    HiveDecimal d5 = HiveDecimal.create("19.36748");
+    HiveDecimal expected5 = HiveDecimal.create("19.367");
     DecimalUtil.round(0, d5, dcv);
-    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d6 = new Decimal128(-25.54537, (short) 5);
-    Decimal128 expected6 = new Decimal128(-25.545, (short)3);
+    HiveDecimal d6 = HiveDecimal.create("-25.54537");
+    HiveDecimal expected6 = HiveDecimal.create("-25.545");
     DecimalUtil.round(0, d6, dcv);
-    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testNegate() {
     DecimalColumnVector dcv = new DecimalColumnVector(4 ,20, 13);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
-    Decimal128 expected1 = new Decimal128(-19.56778, (short)5);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
+    HiveDecimal expected1 = HiveDecimal.create("-19.56778");
     DecimalUtil.negate(0, d1, dcv);
-    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d2 = new Decimal128(-25.34567, (short) 5);
-    Decimal128 expected2 = new Decimal128(25.34567, (short)5);
+    HiveDecimal d2 = HiveDecimal.create("-25.34567");
+    HiveDecimal expected2 = HiveDecimal.create("25.34567");
     DecimalUtil.negate(0, d2, dcv);
-    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
 
-    Decimal128 d3 = new Decimal128(0, (short) 5);
-    Decimal128 expected3 = new Decimal128(0, (short)0);
+    HiveDecimal d3 = HiveDecimal.create("0.00000");
+    Assert.assertEquals(5, d3.scale());
+    HiveDecimal expected3 = HiveDecimal.create("0");
     DecimalUtil.negate(0, d3, dcv);
-    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0]));
+    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
   }
 
   @Test
   public void testSign() {
     LongColumnVector lcv = new LongColumnVector(4);
-    Decimal128 d1 = new Decimal128(19.56778, (short) 5);
+    HiveDecimal d1 = HiveDecimal.create("19.56778");
     DecimalUtil.sign(0, d1, lcv);
     Assert.assertEquals(1, lcv.vector[0]);
 
-    Decimal128 d2 = new Decimal128(-25.34567, (short) 5);
+    HiveDecimal d2 = HiveDecimal.create("-25.34567");
     DecimalUtil.sign(0, d2, lcv);
     Assert.assertEquals(-1, lcv.vector[0]);
 
-    Decimal128 d3 = new Decimal128(0, (short) 5);
+    HiveDecimal d3 = HiveDecimal.create("0.00000");
+    Assert.assertEquals(5, d3.scale());
+    d3.setScale(5);
     DecimalUtil.sign(0, d3, lcv);
     Assert.assertEquals(0, lcv.vector[0]);
   }
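
The hunks above capture the core pattern of this migration: Decimal128 was
mutable and took its scale as an explicit (short) argument, while HiveDecimal
is immutable and derives its scale from the string literal, so the rewritten
tests pin the scale with an extra Assert.assertEquals(5, d.scale()) and read
results back out of the column vector through getHiveDecimal(). (Note that
because HiveDecimal is immutable, a bare statement such as d3.setScale(5) in
testSign above discards its result.) A minimal sketch of the new-style round
trip, assuming only the Hive 0.14-era APIs visible in this diff; the class
name is illustrative:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

    public class HiveDecimalRoundTripSketch {
      public static void main(String[] args) {
        // The scale travels inside the literal, not in a (short) argument.
        HiveDecimal d = HiveDecimal.create("19.56778");

        // The vector holds writable wrappers: values go in via set(...)
        // and come back out via getHiveDecimal().
        DecimalColumnVector dcv = new DecimalColumnVector(4, 20, 13);
        dcv.vector[0].set(d);
        HiveDecimal out = dcv.vector[0].getHiveDecimal();

        // compareTo treats 19 and 19.000 as equal, which is why the tests
        // above prefer it to equals when only the numeric value matters.
        System.out.println(d.compareTo(out)); // 0
      }
    }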

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java Fri Nov  7 20:41:34 2014
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertTrue;
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -315,9 +315,9 @@ public class TestVectorArithmeticExpress
 
     // test without nulls
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-2.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1.00")));
 
     // test nulls propagation
     b = getVectorizedRowBatch3DecimalCols();
@@ -330,18 +330,18 @@ public class TestVectorArithmeticExpress
 
     // Verify null output data entry is not 0, but rather the value specified by design,
     // which is the minimum non-0 value, 0.01 in this case.
-    assertTrue(r.vector[0].equals(new Decimal128("0.01", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.01")));
 
     // test that overflow produces NULL
     b = getVectorizedRowBatch3DecimalCols();
     c0 = (DecimalColumnVector) b.cols[0];
-    c0.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    c0.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     r = (DecimalColumnVector) b.cols[2];
     expr.evaluate(b); // will cause overflow for result at position 0, must yield NULL
     assertTrue(!r.noNulls && r.isNull[0]);
 
     // verify proper null output data value
-    assertTrue(r.vector[0].equals(new Decimal128("0.01", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.01")));
 
     // test left input repeating
     b = getVectorizedRowBatch3DecimalCols();
@@ -349,25 +349,25 @@ public class TestVectorArithmeticExpress
     c0.isRepeating = true;
     r = (DecimalColumnVector) b.cols[2];
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("2.20", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("2.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("2.20")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("2.20")));
 
     // test both inputs repeating
     DecimalColumnVector c1 = (DecimalColumnVector) b.cols[1];
     c1.isRepeating = true;
     expr.evaluate(b);
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
 
     // test right input repeating
     b = getVectorizedRowBatch3DecimalCols();
     c1 = (DecimalColumnVector) b.cols[1];
     c1.isRepeating = true;
-    c1.vector[0].update("2", (short) 2);
+    c1.vector[0].set(HiveDecimal.create("2.00"));
     r = (DecimalColumnVector) b.cols[2];
     expr.evaluate(b);
-    assertTrue(r.vector[2].equals(new Decimal128("2", (short) 2)));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("2.00")));
   }
 
   // Spot check decimal column-column subtract
@@ -379,14 +379,14 @@ public class TestVectorArithmeticExpress
 
     // test without nulls
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("0.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-4.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("-1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-4.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("-1.00")));
 
     // test that underflow produces NULL
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector c0 = (DecimalColumnVector) b.cols[0];
-    c0.vector[0].update("-9999999999999999.99", (short) 2); // set to min possible value
+    c0.vector[0].set(HiveDecimal.create("-9999999999999999.99")); // set to min possible value
     r = (DecimalColumnVector) b.cols[2];
     expr.evaluate(b); // will cause underflow for result at position 0, must yield NULL
     assertTrue(!r.noNulls && r.isNull[0]);
@@ -401,16 +401,16 @@ public class TestVectorArithmeticExpress
 
     // test without nulls
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("1.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-3.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("1.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-3.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0.00")));
 
     // test that underflow produces NULL
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector c0 = (DecimalColumnVector) b.cols[0];
-    c0.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    c0.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     DecimalColumnVector c1 = (DecimalColumnVector) b.cols[1];
-    c1.vector[0].update("2", (short) 2);
+    c1.vector[0].set(HiveDecimal.create("2.00"));
     r = (DecimalColumnVector) b.cols[2];
     expr.evaluate(b); // will cause overflow for result at position 0, must yield NULL
     assertTrue(!r.noNulls && r.isNull[0]);
@@ -422,15 +422,15 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalColAddDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(1);
+    HiveDecimal d = HiveDecimal.create(1);
     VectorExpression expr = new DecimalColAddDecimalScalar(0, d, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-2.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1")));
 
     // test null propagation
     b = getVectorizedRowBatch3DecimalCols();
@@ -449,7 +449,7 @@ public class TestVectorArithmeticExpress
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
 
     // test repeating case for null value
     b = getVectorizedRowBatch3DecimalCols();
@@ -466,7 +466,7 @@ public class TestVectorArithmeticExpress
     // test that overflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    in.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -480,16 +480,16 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalColDivideDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128("2.00", (short) 2);
+    HiveDecimal d = HiveDecimal.create("2.00");
     VectorExpression expr = new DecimalColDivideDecimalScalar(0, d, 2);
 
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("0.60", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-1.65", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.65")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // test null propagation
     b = getVectorizedRowBatch3DecimalCols();
@@ -508,7 +508,7 @@ public class TestVectorArithmeticExpress
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("0.60", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
 
     // test repeating case for null value
     b = getVectorizedRowBatch3DecimalCols();
@@ -525,7 +525,7 @@ public class TestVectorArithmeticExpress
     // test that zero-divide produces null for all output values
     b = getVectorizedRowBatch3DecimalCols();
     in = (DecimalColumnVector) b.cols[0];
-    expr = new DecimalColDivideDecimalScalar(0, new Decimal128("0", (short) 2), 2);
+    expr = new DecimalColDivideDecimalScalar(0, HiveDecimal.create("0"), 2);
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -539,14 +539,14 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalScalarDivideDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128("3.96", (short) 2);  // 1.20 * 3.30
+    HiveDecimal d = HiveDecimal.create("3.96");  // 1.20 * 3.30
     VectorExpression expr = new DecimalScalarDivideDecimalColumn(d, 0, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("3.30", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-1.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("3.3")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.2")));
     assertFalse(r.noNulls); // entry 2 is null due to zero-divide
     assertTrue(r.isNull[2]);
 
@@ -567,7 +567,7 @@ public class TestVectorArithmeticExpress
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("3.30", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("3.3")));
 
     // test repeating case for null value
     b = getVectorizedRowBatch3DecimalCols();
@@ -586,30 +586,32 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalColModuloDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128("2.00", (short) 2);
+    HiveDecimal d = HiveDecimal.create("2.00");
     VectorExpression expr = new DecimalColModuloDecimalScalar(0, d, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("1.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-1.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("1.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // try again with some different data values and divisor
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("15.40", (short) 2);
-    in.vector[1].update("-17.20", (short) 2);
-    in.vector[2].update("70.00", (short) 2);
-    d.update("4.75", (short) 2);
+    in.vector[0].set(HiveDecimal.create("15.40"));
+    in.vector[1].set(HiveDecimal.create("-17.20"));
+    in.vector[2].set(HiveDecimal.create("70.00"));
+    d = HiveDecimal.create("4.75");
+    expr = new DecimalColModuloDecimalScalar(0, d, 2);
 
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("1.15", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-2.95", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("3.50", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("1.15")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.95")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("3.50")));
 
     // try a zero-divide to show a repeating NULL is produced
-    d.update("0", (short) 2);
+    d = HiveDecimal.create("0.00");
+    expr = new DecimalColModuloDecimalScalar(0, d, 2);
     expr.evaluate(b);
     assertFalse(r.noNulls);
     assertTrue(r.isNull[0]);
@@ -620,27 +622,28 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalScalarModuloDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128("2.00", (short) 2);
+    HiveDecimal d = HiveDecimal.create("2.00");
     VectorExpression expr = new DecimalScalarModuloDecimalColumn(d, 0, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("0.80", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("2.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.80")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("2.00")));
     assertFalse(r.noNulls); // entry 2 will be null due to zero-divide
     assertTrue(r.isNull[2]);
 
     // try again with some different data values
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("0.50", (short) 2);
-    in.vector[1].update("0.80", (short) 2);
-    in.vector[2].update("0.70", (short) 2);
+    expr = new DecimalScalarModuloDecimalColumn(d, 0, 2);
+    in.vector[0].set(HiveDecimal.create("0.50"));
+    in.vector[1].set(HiveDecimal.create("0.80"));
+    in.vector[2].set(HiveDecimal.create("0.70"));
 
     expr.evaluate(b);
-    assertTrue(r.vector[0].equals(new Decimal128("0.00", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("0.40", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0.60", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.00")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("0.40")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0.60")));
   }
 
   @Test
@@ -648,16 +651,16 @@ public class TestVectorArithmeticExpress
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in1 = (DecimalColumnVector) b.cols[1];
     for (int i = 0; i < 3; i++) {
-      in1.vector[i] = new Decimal128("0.50", (short) 2);
+      in1.vector[i].set(HiveDecimal.create("0.50"));
     }
     VectorExpression expr = new DecimalColDivideDecimalColumn(0, 1, 2);
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
 
     // all divides are by 0.50 so the result column is 2 times col 0.
-    assertTrue(r.vector[0].equals(new Decimal128("2.40", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-6.60", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.4")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-6.6")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // test null on left
     b.cols[0].noNulls = false;
@@ -692,14 +695,14 @@ public class TestVectorArithmeticExpress
     b.cols[0].isRepeating = true;
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[2].equals(new Decimal128("1.20", (short) 2)));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1.2")));
 
     // test repeating on right
     b = getVectorizedRowBatch3DecimalCols();
     b.cols[1].isRepeating = true;
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // test both repeating
     b = getVectorizedRowBatch3DecimalCols();
@@ -708,11 +711,11 @@ public class TestVectorArithmeticExpress
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("1.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("1.2")));
 
     // test zero-divide to show it results in NULL
     b = getVectorizedRowBatch3DecimalCols();
-    ((DecimalColumnVector) b.cols[1]).vector[0].update(0);
+    ((DecimalColumnVector) b.cols[1]).vector[0].set(HiveDecimal.create("0"));
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -725,15 +728,15 @@ public class TestVectorArithmeticExpress
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in1 = (DecimalColumnVector) b.cols[1];
     for (int i = 0; i < 3; i++) {
-      in1.vector[i] = new Decimal128("0.50", (short) 2);
+      in1.vector[i].set(HiveDecimal.create("0.50"));
     }
     VectorExpression expr = new DecimalColModuloDecimalColumn(0, 1, 2);
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
 
-    assertTrue(r.vector[0].equals(new Decimal128("0.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-0.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-0.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
   }
 
   /* Spot check correctness of decimal column subtract decimal scalar. The case for
@@ -742,20 +745,20 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalColSubtractDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(1);
+    HiveDecimal d = HiveDecimal.create(1);
     VectorExpression expr = new DecimalColSubtractDecimalScalar(0, d, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("0.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-4.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("-1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-4.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("-1")));
 
     // test that underflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("-9999999999999999.99", (short) 2); // set to min possible value
+    in.vector[0].set(HiveDecimal.create("-9999999999999999.99")); // set to min possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -768,20 +771,20 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalColMultiplyDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(2);
+    HiveDecimal d = HiveDecimal.create(2);
     VectorExpression expr = new DecimalColMultiplyDecimalScalar(0, d, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("2.40", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-6.60", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.40")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-6.60")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // test that overflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    in.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -794,15 +797,15 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalScalarAddDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(1);
+    HiveDecimal d = HiveDecimal.create(1);
     VectorExpression expr = new DecimalScalarAddDecimalColumn(d, 0, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-2.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1")));
 
     // test null propagation
     b = getVectorizedRowBatch3DecimalCols();
@@ -821,7 +824,7 @@ public class TestVectorArithmeticExpress
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertTrue(r.isRepeating);
-    assertTrue(r.vector[0].equals(new Decimal128("2.20", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
 
     // test repeating case for null value
     b = getVectorizedRowBatch3DecimalCols();
@@ -838,7 +841,7 @@ public class TestVectorArithmeticExpress
     // test that overflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    in.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -851,20 +854,20 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalScalarSubtractDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(1);
+    HiveDecimal d = HiveDecimal.create(1);
     VectorExpression expr = new DecimalScalarSubtractDecimalColumn(d, 0, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("-0.20", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("4.30", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("1.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("-0.20")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("4.30")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1")));
 
     // test that overflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("-9999999999999999.99", (short) 2); // set to min possible value
+    in.vector[0].set(HiveDecimal.create("-9999999999999999.99")); // set to min possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -878,20 +881,20 @@ public class TestVectorArithmeticExpress
   @Test
   public void testDecimalScalarMultiplyDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
-    Decimal128 d = new Decimal128(2);
+    HiveDecimal d = HiveDecimal.create(2);
     VectorExpression expr = new DecimalScalarMultiplyDecimalColumn(d, 0, 2);
 
     // test without nulls
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
-    assertTrue(r.vector[0].equals(new Decimal128("2.40", (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-6.60", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("0", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.40")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-6.60")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
 
     // test that overflow produces null
     b = getVectorizedRowBatch3DecimalCols();
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[0].update("9999999999999999.99", (short) 2); // set to max possible value
+    in.vector[0].set(HiveDecimal.create("9999999999999999.99")); // set to max possible value
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[2];
     assertFalse(r.noNulls);
@@ -905,13 +908,13 @@ public class TestVectorArithmeticExpress
     b.cols[0] = v0 = new DecimalColumnVector(18, 2);
     b.cols[1] = v1 = new DecimalColumnVector(18, 2);
     b.cols[2] = new DecimalColumnVector(18, 2);
-    v0.vector[0].update("1.20", (short) 2);
-    v0.vector[1].update("-3.30", (short) 2);
-    v0.vector[2].update("0", (short) 2);
-
-    v1.vector[0].update("1.00", (short) 2);
-    v1.vector[1].update("1.00", (short) 2);
-    v1.vector[2].update("1.00", (short) 2);
+    v0.vector[0].set(HiveDecimal.create("1.20"));
+    v0.vector[1].set(HiveDecimal.create("-3.30"));
+    v0.vector[2].set(HiveDecimal.create("0"));
+
+    v1.vector[0].set(HiveDecimal.create("1.00"));
+    v1.vector[1].set(HiveDecimal.create("1.00"));
+    v1.vector[2].set(HiveDecimal.create("1.00"));
 
     b.size = 3;
 

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java Fri Nov  7 20:41:34 2014
@@ -84,8 +84,8 @@ public class TestVectorExpressionWriters
     return null;
   }
 
-  private Writable getWritableValue(TypeInfo ti, Decimal128 value) {
-    return new HiveDecimalWritable(HiveDecimal.create(value.toBigDecimal()));
+  private Writable getWritableValue(TypeInfo ti, HiveDecimal value) {
+    return new HiveDecimalWritable(value);
   }
 
   private Writable getWritableValue(TypeInfo ti, byte[] value) {
@@ -163,7 +163,7 @@ public class TestVectorExpressionWriters
     for (int i = 0; i < vectorSize; i++) {
       Writable w = (Writable) vew.writeValue(dcv, i);
       if (w != null) {
-        Writable expected = getWritableValue(type, dcv.vector[i]);
+        Writable expected = getWritableValue(type, dcv.vector[i].getHiveDecimal());
         Assert.assertEquals(expected, w);
       } else {
         Assert.assertTrue(dcv.isNull[i]);
@@ -182,7 +182,7 @@ public class TestVectorExpressionWriters
       values[i] = null;  // setValue() should be able to handle null input
       values[i] = vew.setValue(values[i], dcv, i);
       if (values[i] != null) {
-        Writable expected = getWritableValue(type, dcv.vector[i]);
+        Writable expected = getWritableValue(type, dcv.vector[i].getHiveDecimal());
         Assert.assertEquals(expected, values[i]);
       } else {
         Assert.assertTrue(dcv.isNull[i]);
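
On the writer side the change is a pure simplification: HiveDecimalWritable
wraps a HiveDecimal directly, so the old Decimal128 -> BigDecimal ->
HiveDecimal bridge disappears. A short sketch using the serde2 types shown
above:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class WritableBridgeSketch {
      public static void main(String[] args) {
        // Old: new HiveDecimalWritable(HiveDecimal.create(d128.toBigDecimal()))
        // New: wrap the value directly, no intermediate BigDecimal hop.
        HiveDecimalWritable w = new HiveDecimalWritable(HiveDecimal.create("3.14"));
        System.out.println(w.getHiveDecimal()); // 3.14
      }
    }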

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java Fri Nov  7 20:41:34 2014
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertTru
 
 import java.sql.Timestamp;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -830,8 +830,7 @@ public class TestVectorFilterExpressions
   @Test
   public void testFilterDecimalColEqualDecimalScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
-    Decimal128 scalar = new Decimal128();
-    scalar.update("-3.30", (short) 2);
+    HiveDecimal scalar = HiveDecimal.create("-3.30");
     VectorExpression expr = new FilterDecimalColEqualDecimalScalar(0, scalar);
     expr.evaluate(b);
 
@@ -876,8 +875,7 @@ public class TestVectorFilterExpressions
   @Test
   public void testFilterDecimalScalarEqualDecimalColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
-    Decimal128 scalar = new Decimal128();
-    scalar.update("-3.30", (short) 2);
+    HiveDecimal scalar = HiveDecimal.create("-3.30");
     VectorExpression expr = new FilterDecimalScalarEqualDecimalColumn(scalar, 0);
     expr.evaluate(b);
 
@@ -982,8 +980,7 @@ public class TestVectorFilterExpressions
   @Test
   public void testFilterDecimalColLessScalar() {
     VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
-    Decimal128 scalar = new Decimal128();
-    scalar.update("0", (short) 2);
+    HiveDecimal scalar = HiveDecimal.create("0");
     VectorExpression expr = new FilterDecimalColLessDecimalScalar(0, scalar);
     expr.evaluate(b);
 
@@ -999,8 +996,7 @@ public class TestVectorFilterExpressions
   @Test
   public void testFilterDecimalScalarGreaterThanColumn() {
     VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
-    Decimal128 scalar = new Decimal128();
-    scalar.update("0", (short) 2);
+    HiveDecimal scalar = HiveDecimal.create("0");
     VectorExpression expr = new FilterDecimalScalarGreaterDecimalColumn(scalar, 0);
     expr.evaluate(b);
 
@@ -1030,9 +1026,9 @@ public class TestVectorFilterExpressions
     VectorizedRowBatch b = new VectorizedRowBatch(1);
     DecimalColumnVector v0;
     b.cols[0] = v0 = new DecimalColumnVector(18, 2);
-    v0.vector[0].update("1.20", (short) 2);
-    v0.vector[1].update("-3.30", (short) 2);
-    v0.vector[2].update("0", (short) 2);
+    v0.vector[0].set(HiveDecimal.create("1.20"));
+    v0.vector[1].set(HiveDecimal.create("-3.30"));
+    v0.vector[2].set(HiveDecimal.create("0"));
 
     b.size = 3;
     return b;
@@ -1042,14 +1038,14 @@ public class TestVectorFilterExpressions
     VectorizedRowBatch b = new VectorizedRowBatch(2);
     DecimalColumnVector v0, v1;
     b.cols[0] = v0 = new DecimalColumnVector(18, 2);
-    v0.vector[0].update("1.20", (short) 2);
-    v0.vector[1].update("-3.30", (short) 2);
-    v0.vector[2].update("0", (short) 2);
+    v0.vector[0].set(HiveDecimal.create("1.20"));
+    v0.vector[1].set(HiveDecimal.create("-3.30"));
+    v0.vector[2].set(HiveDecimal.create("0"));
 
     b.cols[1] = v1 = new DecimalColumnVector(18, 2);
-    v1.vector[0].update("-1", (short) 2);
-    v1.vector[1].update("-3.30", (short) 2);
-    v1.vector[2].update("10", (short) 2);
+    v1.vector[0].set(HiveDecimal.create("-1.00"));
+    v1.vector[1].set(HiveDecimal.create("-3.30"));
+    v1.vector[2].set(HiveDecimal.create("10.00"));
 
     b.size = 3;
     return b;
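
The filter tests show the same two simplifications: scalars are built in one
step (HiveDecimal.create("-3.30") replaces new Decimal128() followed by
update(...)), and batch values are written with set(...) using literals that
carry their own scale, which is why "-1" at (short) 2 becomes the literal
"-1.00". A sketch of a one-column filter batch; as before, the generated
filter class's package is assumed:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
    // Package assumed for the generated decimal filter expressions.
    import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalScalar;

    public class DecimalFilterSketch {
      public static void main(String[] args) {
        VectorizedRowBatch b = new VectorizedRowBatch(1);
        DecimalColumnVector v0 = new DecimalColumnVector(18, 2);
        b.cols[0] = v0;
        v0.vector[0].set(HiveDecimal.create("1.20"));
        v0.vector[1].set(HiveDecimal.create("-3.30"));
        v0.vector[2].set(HiveDecimal.create("0"));
        b.size = 3;

        // One-step scalar creation; no separate update(...) call.
        HiveDecimal scalar = HiveDecimal.create("-3.30");
        VectorExpression expr = new FilterDecimalColEqualDecimalScalar(0, scalar);
        expr.evaluate(b); // only row 1 remains selected
      }
    }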

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java Fri Nov  7 20:41:34 2014
@@ -28,6 +28,7 @@ import java.util.Arrays;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -231,7 +232,7 @@ public class TestVectorTypeCasts {
     VectorizedRowBatch b = getBatchDecimalLong();
     VectorExpression expr = new CastDecimalToBoolean(0, 1);
     DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
-    in.vector[1].update(0);
+    in.vector[1].set(HiveDecimal.create(0));
     expr.evaluate(b);
     LongColumnVector r = (LongColumnVector) b.cols[1];
     assertEquals(1, r.vector[0]);
@@ -248,9 +249,9 @@ public class TestVectorTypeCasts {
 
     b.size = 3;
 
-    dv.vector[0].update("1.1", scale);
-    dv.vector[1].update("-2.2", scale);
-    dv.vector[2].update("9999999999999999.00", scale);
+    dv.vector[0].set(HiveDecimal.create("1.1").setScale(scale));
+    dv.vector[1].set(HiveDecimal.create("-2.2").setScale(scale));
+    dv.vector[2].set(HiveDecimal.create("9999999999999999.00").setScale(scale));
 
     return b;
   }
@@ -308,9 +309,9 @@ public class TestVectorTypeCasts {
 
     b.size = 3;
 
-    dv.vector[0].update("1.1", scale);
-    dv.vector[1].update("-2.2", scale);
-    dv.vector[2].update("9999999999999999.00", scale);
+    dv.vector[0].set(HiveDecimal.create("1.1").setScale(scale));
+    dv.vector[1].set(HiveDecimal.create("-2.2").setScale(scale));
+    dv.vector[2].set(HiveDecimal.create("9999999999999999.00").setScale(scale));
 
     return b;
   }
@@ -322,12 +323,13 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     BytesColumnVector r = (BytesColumnVector) b.cols[1];
 
-    byte[] v = toBytes("1.1");
+    byte[] v = toBytes("1.10");
+    assertTrue(((Integer) v.length).toString() + " " + r.length[0], v.length == r.length[0]);
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[0], r.start[0], r.length[0]));
 
-    v = toBytes("-2.2");
+    v = toBytes("-2.20");
     Assert.assertEquals(0,
         StringExpr.compare(v, 0, v.length,
             r.vector[1], r.start[1], r.length[1]));
@@ -347,9 +349,9 @@ public class TestVectorTypeCasts {
 
     b.size = 3;
 
-    dv.vector[0].update("1.1", scale);
-    dv.vector[1].update("-2.2", scale);
-    dv.vector[2].update("9999999999999999.00", scale);
+    dv.vector[0].set(HiveDecimal.create("1.1").setScale(scale));
+    dv.vector[1].set(HiveDecimal.create("-2.2").setScale(scale));
+    dv.vector[2].set(HiveDecimal.create("9999999999999999.00").setScale(scale));
 
     return b;
   }
@@ -374,9 +376,9 @@ public class TestVectorTypeCasts {
 
     b.size = 3;
 
-    dv.vector[0].update("1.111111111", scale);
-    dv.vector[1].update("-2.222222222", scale);
-    dv.vector[2].update("31536000.999999999", scale);
+    dv.vector[0].set(HiveDecimal.create("1.111111111").setScale(scale));
+    dv.vector[1].set(HiveDecimal.create("-2.222222222").setScale(scale));
+    dv.vector[2].set(HiveDecimal.create("31536000.999999999").setScale(scale));
 
     return b;
   }
@@ -387,9 +389,9 @@ public class TestVectorTypeCasts {
     VectorExpression expr = new CastLongToDecimal(0, 1);
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].equals(new Decimal128(0, (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128(-1, (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128(99999999999999L, (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("99999999999999")));
   }
 
   private VectorizedRowBatch getBatchLongDecimal() {
@@ -410,9 +412,9 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
 
-    assertTrue(r.vector[0].equals(new Decimal128(0, r.scale)));
-    assertTrue(r.vector[1].equals(new Decimal128(-1, r.scale)));
-    assertTrue(r.vector[2].equals(new Decimal128("99999999999999.0", r.scale)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.0")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.0")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("99999999999999")));
   }
 
   private VectorizedRowBatch getBatchDoubleDecimal() {
@@ -437,9 +439,9 @@ public class TestVectorTypeCasts {
     VectorExpression expr = new CastStringToDecimal(0, 1);
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].equals(new Decimal128("1.10", r.scale)));
-    assertTrue(r.vector[1].equals(new Decimal128("-2.20", r.scale)));
-    assertTrue(r.vector[2].equals(new Decimal128("99999999999999.0", r.scale)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("1.10")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.20")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("99999999999999.0")));
   }
 
   private VectorizedRowBatch getBatchStringDecimal() {
@@ -472,9 +474,9 @@ public class TestVectorTypeCasts {
     inL.vector[1] = -1990000000L;
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].equals(new Decimal128(0, (short) 2)));
-    assertTrue(r.vector[1].equals(new Decimal128("-1.99", (short) 2)));
-    assertTrue(r.vector[2].equals(new Decimal128("100000.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.00")));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.99")));
+    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("100000.00")));
 
     // Try again with a value that won't fit in 5 digits, to make
     // sure that NULL is produced.
@@ -503,6 +505,7 @@ public class TestVectorTypeCasts {
     return b;
   }
 
+  /*
   @Test
   public void testCastDecimalToDecimal() {
 
@@ -511,7 +514,7 @@ public class TestVectorTypeCasts {
     VectorExpression expr = new CastDecimalToDecimal(0, 1);
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].equals(new Decimal128("10.00", (short) 2)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("10.00", (short) 2)));
     assertFalse(r.noNulls);
     assertTrue(r.isNull[1]);
 
@@ -520,10 +523,11 @@ public class TestVectorTypeCasts {
     expr = new CastDecimalToDecimal(1, 0);
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[0];
-    assertTrue(r.vector[0].equals(new Decimal128("100.01", (short) 4)));
-    assertTrue(r.vector[1].equals(new Decimal128("-200.02", (short) 4)));
+    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("100.01", (short) 4)));
+    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-200.02", (short) 4)));
     assertTrue(r.noNulls);
   }
+  */
 
   private VectorizedRowBatch getBatchDecimalDecimal() {
     VectorizedRowBatch b = new VectorizedRowBatch(2);
@@ -532,11 +536,11 @@ public class TestVectorTypeCasts {
     b.cols[0] = v0 = new DecimalColumnVector(18, 4);
     b.cols[1] = v1 = new DecimalColumnVector(5, 2);
 
-    v0.vector[0].update(new Decimal128("10.0001", (short) 4));
-    v0.vector[1].update(new Decimal128("-9999999.9999", (short) 4));
+    v0.vector[0].set(HiveDecimal.create("10.0001"));
+    v0.vector[1].set(HiveDecimal.create("-9999999.9999"));
 
-    v1.vector[0].update(new Decimal128("100.01", (short) 2));
-    v1.vector[1].update(new Decimal128("-200.02", (short) 2));
+    v1.vector[0].set(HiveDecimal.create("100.01"));
+    v1.vector[1].set(HiveDecimal.create("-200.02"));
 
     b.size = 2;
     return b;

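For context on the pattern repeated throughout these test changes: DecimalColumnVector entries are now HiveDecimalWritable rather than Decimal128, so values are written with set(...) and read back with getHiveDecimal(). A minimal standalone sketch of that round trip, using only calls that appear in the hunks above (class name and printed output are illustrative, not part of this commit):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

    public class DecimalVectorRoundTrip {
      public static void main(String[] args) {
        // precision 18, scale 4, matching getBatchDecimalDecimal() above
        DecimalColumnVector v = new DecimalColumnVector(18, 4);
        v.set(0, HiveDecimal.create("10.0001"));              // setter used by this patch
        v.vector[1].set(HiveDecimal.create("-9999999.9999")); // direct writable access
        // reads go through the wrapped writable
        System.out.println(v.vector[0].getHiveDecimal().equals(HiveDecimal.create("10.0001")));
      }
    }
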
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java Fri Nov  7 20:41:34 2014
@@ -26,7 +26,7 @@ import java.util.regex.MatchResult;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -168,7 +168,7 @@ public class FakeVectorRowBatchFromObjec
                         int row,
                         Object value) {
                     DecimalColumnVector dcv = (DecimalColumnVector) columnVector;
-                    dcv.vector[row] = (Decimal128)value;
+                    dcv.set(row, (HiveDecimal) value);
                 }
             };
       } else {

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java Fri Nov  7 20:41:34 2014
@@ -21,10 +21,12 @@ package org.apache.hadoop.hive.ql.exec.v
 import java.util.Random;
 
 import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 
 public class VectorizedRowGroupGenUtil {
@@ -114,10 +116,10 @@ public class VectorizedRowGroupGenUtil {
     dcv.noNulls = !nulls;
     dcv.isRepeating = repeating;
 
-    Decimal128 repeatingValue = new Decimal128();
+    HiveDecimalWritable repeatingValue = new HiveDecimalWritable();
     do{
-      repeatingValue.update(rand.nextDouble(), (short)typeInfo.scale());
-    }while(repeatingValue.doubleValue() == 0);
+      repeatingValue.set(HiveDecimal.create(Double.toString(rand.nextDouble())).setScale(typeInfo.scale()));
+    }while(repeatingValue.getHiveDecimal().doubleValue() == 0);
 
     int nullFrequency = generateNullFrequency(rand);
 
@@ -129,12 +131,12 @@ public class VectorizedRowGroupGenUtil {
       }else {
         dcv.isNull[i] = false;
         if (repeating) {
-          dcv.vector[i].update(repeatingValue);
+          dcv.vector[i].set(repeatingValue);
         } else {
-          dcv.vector[i].update(rand.nextDouble(), (short) typeInfo.scale());
+          dcv.vector[i].set(HiveDecimal.create(Double.toString(rand.nextDouble())).setScale(typeInfo.scale()));
         }
 
-        if(dcv.vector[i].doubleValue() == 0) {
+        if(dcv.vector[i].getHiveDecimal().doubleValue() == 0) {
           i--;
         }
       }

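The generator above now builds random decimals by formatting rand.nextDouble() as a string and rescaling, retrying until the result is nonzero (a double drawn from [0, 1) can round to zero at small scales). A sketch of that rejection loop in isolation, assuming only the HiveDecimal calls already used in the hunk; class and method names are illustrative:

    import java.util.Random;

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class RandomDecimalSketch {
      // Draws a nonzero HiveDecimal at the requested scale, mirroring the
      // do/while rejection loop in the diff above.
      static HiveDecimal randomNonZeroDecimal(Random rand, int scale) {
        HiveDecimal d;
        do {
          d = HiveDecimal.create(Double.toString(rand.nextDouble())).setScale(scale);
        } while (d.doubleValue() == 0);
        return d;
      }

      public static void main(String[] args) {
        System.out.println(randomNonZeroDecimal(new Random(), 2));
      }
    }
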
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Fri Nov  7 20:41:34 2014
@@ -51,7 +51,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -1466,8 +1465,8 @@ public class TestInputOutputFormat {
       assertEquals("checking double " + i, i, doubleCoulmn.vector[i], 0.0001);
       assertEquals("checking string " + i, new Text(Long.toHexString(i)),
           stringColumn.getWritableObject(i));
-      assertEquals("checking decimal " + i, new Decimal128(i),
-          decimalColumn.vector[i]);
+      assertEquals("checking decimal " + i, HiveDecimal.create(i),
+          decimalColumn.vector[i].getHiveDecimal());
       assertEquals("checking date " + i, i, dateColumn.vector[i]);
       long millis = (long) i * MILLIS_IN_DAY;
       millis -= LOCAL_TIMEZONE.getOffset(millis);

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java Fri Nov  7 20:41:34 2014
@@ -143,22 +143,23 @@ public class TestParquetTimestampUtils e
     cal.set(Calendar.YEAR,  1968);
     cal.set(Calendar.MONTH, Calendar.MAY);
     cal.set(Calendar.DAY_OF_MONTH, 23);
-    if ((TimeZone.getTimeZone("US/Pacific").inDaylightTime(new Date()))) {
-      cal.set(Calendar.HOUR_OF_DAY, 18);
-    } else {
-      cal.set(Calendar.HOUR_OF_DAY, 17);
-    }
+    cal.set(Calendar.HOUR_OF_DAY, 17);
     cal.set(Calendar.MINUTE, 1);
     cal.set(Calendar.SECOND, 1);
     cal.setTimeZone(TimeZone.getTimeZone("US/Pacific"));
     Timestamp ts = new Timestamp(cal.getTimeInMillis());
     ts.setNanos(1);
 
-    //18:00 PST = 01:00 GMT (if daylight-savings)
-    //17:00 PST = 01:00 GMT (if not daylight savings)
-    //(1*60*60 + 1*60 + 1)*10e9 + 1
+    /**
+     * 17:00 PDT = 00:00 GMT the next day (daylight savings):
+     * (0*60*60 + 1*60 + 1)*1e9 + 1 = 61000000001, or
+     *
+     * 17:00 PST = 01:00 GMT the next day (if not daylight savings):
+     * (1*60*60 + 1*60 + 1)*1e9 + 1 = 3661000000001
+     */
     NanoTime nt = NanoTimeUtils.getNanoTime(ts);
-    Assert.assertEquals(nt.getTimeOfDayNanos(), 3661000000001L);
+    long timeOfDayNanos = nt.getTimeOfDayNanos();
+    Assert.assertTrue(timeOfDayNanos == 61000000001L || timeOfDayNanos == 3661000000001L);
 
     //in both cases, this will be the next day in GMT
     Assert.assertEquals(nt.getJulianDay(), 2440001);

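The rewritten assertion accepts either constant because the GMT offset of 17:00 US/Pacific differs by an hour between daylight and standard time. The arithmetic behind the two values, as standalone Java (nothing below is Hive or Parquet API; the class name is illustrative):

    public class NanoConstants {
      public static void main(String[] args) {
        final long NANOS_PER_SECOND = 1000000000L;
        // 17:01:01.000000001 PDT = 00:01:01.000000001 GMT, next day
        long pdtNanos = (0 * 3600 + 1 * 60 + 1) * NANOS_PER_SECOND + 1;
        // 17:01:01.000000001 PST = 01:01:01.000000001 GMT, next day
        long pstNanos = (1 * 3600 + 1 * 60 + 1) * NANOS_PER_SECOND + 1;
        System.out.println(pdtNanos == 61000000001L);   // true
        System.out.println(pstNanos == 3661000000001L); // true
      }
    }
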
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java Fri Nov  7 20:41:34 2014
@@ -87,7 +87,8 @@ public class TestSQLStdHiveAccessControl
 
     // create list with variables that match some of the regexes
     List<String> confVarRegexList = Arrays.asList("hive.convert.join.bucket.mapjoin.tez",
-        "hive.optimize.index.filter.compact.maxsize", "hive.tez.dummy", "tez.task.dummy");
+        "hive.optimize.index.filter.compact.maxsize", "hive.tez.dummy", "tez.task.dummy",
+        "hive.exec.dynamic.partition", "hive.exec.dynamic.partition.mode");
 
     // combine two lists
     List<String> varList = new ArrayList<String>();

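The two dynamic-partition variables are added here because they are expected to match the default whitelist of parameters settable through HiveServer2. Conceptually the check reduces to matching each name against a list of regexes; a sketch under that assumption (the patterns below are illustrative placeholders, not the real HiveConf defaults):

    import java.util.Arrays;
    import java.util.List;
    import java.util.regex.Pattern;

    public class WhitelistSketch {
      // Illustrative patterns only; the actual defaults live in HiveConf.
      static final List<Pattern> WHITELIST = Arrays.asList(
          Pattern.compile("hive\\.exec\\.dynamic\\.partition.*"),
          Pattern.compile("tez\\..*"));

      static boolean isSettable(String confVar) {
        for (Pattern p : WHITELIST) {
          if (p.matcher(confVar).matches()) {
            return true;
          }
        }
        return false;
      }

      public static void main(String[] args) {
        System.out.println(isSettable("hive.exec.dynamic.partition.mode")); // true
        System.out.println(isSettable("hive.server2.authentication"));      // false
      }
    }
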
Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q Fri Nov  7 20:41:34 2014
@@ -1,19 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
 -- testAggrFuncsWithNoGBYNoPartDef
 select p_mfgr, 
 sum(p_retailprice) as s1  

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q Fri Nov  7 20:41:34 2014
@@ -1,19 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
 -- testAmbiguousWindowDefn
 select p_mfgr, p_name, p_size, 
 sum(p_size) over (w1) as s1, 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DistributeByOrderBy.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DistributeByOrderBy.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DistributeByOrderBy.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DistributeByOrderBy.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testPartitonBySortBy
 select p_mfgr, p_name, p_size,
 sum(p_retailprice) over (distribute by p_mfgr order by p_mfgr) as s1

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testDuplicateWindowAlias
 select p_mfgr, p_name, p_size, 
 sum(p_size) over (w1) as s1, 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testHavingLeadWithNoGBYNoWindowing
 select  p_mfgr,p_name, p_size 
 from part 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testHavingLeadWithPTF
 select  p_mfgr,p_name, p_size 
 from noop(on part 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q Fri Nov  7 20:41:34 2014
@@ -1,21 +1,6 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING,
-    p_complex array<int>
-);
-
 -- testInvalidValueBoundary
 select  p_mfgr,p_name, p_size,   
 sum(p_size) over (w1) as s ,    
 dense_rank() over(w1) as dr  
-from part  
+from part
 window w1 as (partition by p_mfgr order by p_complex range between  2 preceding and current row);

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q Fri Nov  7 20:41:34 2014
@@ -1,20 +1,6 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testJoinWithAmbigousAlias
 select abc.* 
-from noop(on part 
+from noop(on part
 partition by p_mfgr 
 order by p_name 
 ) abc join part on abc.p_partkey = p1.p_partkey;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_PartitionBySortBy.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_PartitionBySortBy.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_PartitionBySortBy.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_PartitionBySortBy.q Fri Nov  7 20:41:34 2014
@@ -1,19 +1,5 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testPartitonBySortBy
 select p_mfgr, p_name, p_size,
 sum(p_retailprice) over (partition by p_mfgr sort by p_mfgr) as s1
-from part 
+from part
 ;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 -- testWhereWithRankCond
 select  p_mfgr,p_name, p_size, 
 rank() over() as r 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries.q Fri Nov  7 20:41:34 2014
@@ -1,16 +1,3 @@
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 select p_mfgr, p_name, p_size,
     sum(p_retailprice) over (rows unbounded following) as s1 
      from part distribute by p_mfgr sort by p_name;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries2.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries2.q Fri Nov  7 20:41:34 2014
@@ -1,16 +1,3 @@
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 select p_mfgr, p_name, p_size,
     sum(p_retailprice) over (range unbounded following) as s1
      from part distribute by p_mfgr sort by p_name;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/subquery_nested_subquery.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/subquery_nested_subquery.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/subquery_nested_subquery.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/subquery_nested_subquery.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 select *
 from part x 
 where x.p_name in (select y.p_name from part y where exists (select z.p_name from part z where y.p_name = z.p_name))

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/subquery_windowing_corr.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/subquery_windowing_corr.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/subquery_windowing_corr.q Fri Nov  7 20:41:34 2014
@@ -1,21 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
 -- corr and windowing 
 select p_mfgr, p_name, p_size 
 from part a 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/windowing_leadlag_in_udaf.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/windowing_leadlag_in_udaf.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/windowing_leadlag_in_udaf.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/windowing_leadlag_in_udaf.q Fri Nov  7 20:41:34 2014
@@ -1,15 +1 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 select sum(lead(p_retailprice,1)) as s1  from part;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q Fri Nov  7 20:41:34 2014
@@ -1,21 +1,3 @@
-DROP TABLE IF EXISTS part;
-
--- data setup
-CREATE TABLE part(
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
 select p_mfgr, p_name, p_size,
 min(p_retailprice),
 rank() over(distribute by p_mfgr sort by p_name)as r,

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_over.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_over.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_over.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_over.q Fri Nov  7 20:41:34 2014
@@ -1,17 +1,3 @@
-DROP TABLE part;
-
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
 select p_mfgr, 
 lead(p_retailprice,1) as s1  
 from part;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/create_like.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/create_like.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/create_like.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/create_like.q Fri Nov  7 20:41:34 2014
@@ -21,8 +21,10 @@ INSERT OVERWRITE TABLE table2 SELECT key
 SELECT * FROM table1;
 SELECT * FROM table2;
 
-CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
-CREATE EXTERNAL TABLE table5 LIKE table4 LOCATION '${system:hive.root}/data/files/ext_test';
+dfs -cp ${system:hive.root}/data/files/ext_test ${system:test.tmp.dir}/ext_test;
+
+CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.tmp.dir}/ext_test';
+CREATE EXTERNAL TABLE table5 LIKE table4 LOCATION '${system:test.tmp.dir}/ext_test';
 
 SELECT * FROM table4;
 SELECT * FROM table5;
@@ -31,7 +33,7 @@ DROP TABLE table5;
 SELECT * FROM table4;
 DROP TABLE table4;
 
-CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:hive.root}/data/files/ext_test';
+CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.tmp.dir}/ext_test';
 SELECT * FROM table4;
 
 CREATE TABLE doctors STORED AS AVRO TBLPROPERTIES ('avro.schema.literal'='{

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_alt_syntax.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_alt_syntax.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_alt_syntax.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_alt_syntax.q Fri Nov  7 20:41:34 2014
@@ -1,20 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
 explain select p1.p_name, p2.p_name
 from part p1 , part p2;
 

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q Fri Nov  7 20:41:34 2014
@@ -1,22 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
-
 explain select *
 from part p1 join part p2 join part p3 on p1.p_name = p2.p_name and p2.p_name = p3.p_name;
 

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q Fri Nov  7 20:41:34 2014
@@ -1,21 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
 explain select *
 from part p1 join part p2 join part p3 on p1.p_name = p2.p_name join part p4 on p2.p_name = p3.p_name and p1.p_name = p4.p_name;
 

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_3.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_3.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_3.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_3.q Fri Nov  7 20:41:34 2014
@@ -1,22 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
-
 explain select *
 from part p1 join part p2 join part p3 
 where p1.p_name = p2.p_name and p2.p_name = p3.p_name;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_4.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_4.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_4.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_4.q Fri Nov  7 20:41:34 2014
@@ -1,21 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
-
 explain select *
 from part p1 join part p2 join part p3 on p1.p_name = p2.p_name join part p4 
 where p2.p_name = p3.p_name and p1.p_name = p4.p_name;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual1.q?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual1.q Fri Nov  7 20:41:34 2014
@@ -1,20 +1,3 @@
-DROP TABLE part;
-
--- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
-
 create table part2( 
     p2_partkey INT,
     p2_name STRING,