Posted to commits@hive.apache.org by pr...@apache.org on 2014/05/16 20:49:40 UTC

svn commit: r1595302 - in /hive/trunk: ./ data/files/ ql/src/java/org/apache/hadoop/hive/ql/io/orc/ ql/src/test/org/apache/hadoop/hive/ql/io/orc/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: prasanthj
Date: Fri May 16 18:49:39 2014
New Revision: 1595302

URL: http://svn.apache.org/r1595302
Log:
HIVE-7067: Min() and Max() on Timestamp and Date columns for ORC returns wrong results (Prasanth J, reviewed by Jason Dere)
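
The root cause, in short: the pre-fix reader handed back mutable java.sql.Timestamp and java.sql.Date objects and reused them across rows by mutating whatever the caller passed in as "previous". Any consumer that held on to a returned value, such as a min/max aggregation buffer, therefore saw its saved value change as later rows were read. Below is a minimal standalone sketch of that aliasing hazard; the class and method names are hypothetical and the code is not part of this commit:

    import java.sql.Timestamp;

    // Hypothetical reader that, like the pre-fix timestamp reader in
    // RecordReaderImpl, mutates and returns the same Timestamp instance.
    public class ReuseHazard {
      static final Timestamp shared = new Timestamp(0);

      static Timestamp next(long millis) {
        shared.setTime(millis);   // mutate in place, return same instance
        return shared;
      }

      public static void main(String[] args) {
        Timestamp max = null;
        for (long millis : new long[]{3000L, 1000L, 2000L}) {
          Timestamp current = next(millis);
          if (max == null || current.compareTo(max) > 0) {
            max = current;        // keeps a reference, not a copy
          }
        }
        // Prints 2000 (the last row), not the true max 3000, because
        // max aliases the shared instance the reader keeps mutating.
        System.out.println(max.getTime());
      }
    }

The commit switches the reader to Writable wrappers (TimestampWritable, DateWritable, HiveDecimalWritable) and the ORC object inspectors to the matching writable flavors, so consumers can cheaply snapshot values instead of aliasing reader-owned objects.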

Added:
    hive/trunk/data/files/alltypes2.txt
    hive/trunk/ql/src/test/queries/clientpositive/orc_min_max.q
    hive/trunk/ql/src/test/results/clientpositive/orc_min_max.q.out
Modified:
    hive/trunk/pom.xml
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java

Added: hive/trunk/data/files/alltypes2.txt
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/alltypes2.txt?rev=1595302&view=auto
==============================================================================
--- hive/trunk/data/files/alltypes2.txt (added)
+++ hive/trunk/data/files/alltypes2.txt Fri May 16 18:49:39 2014
@@ -0,0 +1,2 @@
+true|10|100|1000|10000|4.0|20.0|4.2222|1969-12-31 15:59:58.174|1970-01-01|string|hello|hello|k1:v1,k2:v2|100,200|{10, "foo"}
+false|20|200|2000|20000|8.0|40.0|2.2222|1970-12-31 15:59:58.174|1971-01-01|abcd|world|world|k3:v3,k4:v4|200,300|{20, "bar"}

Modified: hive/trunk/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Fri May 16 18:49:39 2014
@@ -738,6 +738,9 @@
             <exclude>**/TestHiveServer2Concurrency.java</exclude>
             <exclude>**/TestHiveMetaStore.java</exclude>
           </excludes>
+	  <includes>
+		  <include>**/${testPackage}/*.java</include>
+	  </includes>
           <redirectTestOutputToFile>true</redirectTestOutputToFile>
           <reuseForks>false</reuseForks>
           <failIfNoTests>false</failIfNoTests>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java Fri May 16 18:49:39 2014
@@ -520,11 +520,11 @@ final public class OrcStruct implements 
             return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                 (PrimitiveTypeInfo) info);
           case TIMESTAMP:
-            return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
+            return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
           case DATE:
-            return PrimitiveObjectInspectorFactory.javaDateObjectInspector;
+            return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
           case DECIMAL:
-            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+            return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                 (PrimitiveTypeInfo)info);
           default:
             throw new IllegalArgumentException("Unknown primitive type " +
@@ -581,13 +581,13 @@ final public class OrcStruct implements 
         return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
             TypeInfoFactory.getVarcharTypeInfo(type.getMaximumLength()));
       case TIMESTAMP:
-        return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
+        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
       case DATE:
-        return PrimitiveObjectInspectorFactory.javaDateObjectInspector;
+        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
       case DECIMAL:
         int precision = type.hasPrecision() ? type.getPrecision() : HiveDecimal.SYSTEM_DEFAULT_PRECISION;
         int scale =  type.hasScale()? type.getScale() : HiveDecimal.SYSTEM_DEFAULT_SCALE;
-        return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+        return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
             TypeInfoFactory.getDecimalTypeInfo(precision, scale));
       case STRUCT:
         return new OrcStructInspector(columnId, types);
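
Since RecordReaderImpl now returns TimestampWritable, DateWritable, and HiveDecimalWritable (see the next diff), the inspectors above have to be the writable flavors: a java inspector casts the row object to the bare java.sql type and would fail on a Writable wrapper. An illustrative standalone snippet of the two contracts (not part of this commit):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class InspectorContract {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2000-03-12 15:00:00");

        // The java inspector unwraps a bare java.sql.Timestamp...
        Timestamp viaJava = PrimitiveObjectInspectorFactory
            .javaTimestampObjectInspector.getPrimitiveJavaObject(ts);

        // ...while the writable inspector unwraps a TimestampWritable,
        // which is what the ORC reader hands back after this commit.
        Timestamp viaWritable = PrimitiveObjectInspectorFactory
            .writableTimestampObjectInspector
            .getPrimitiveJavaObject(new TimestampWritable(ts));

        System.out.println(viaJava.equals(viaWritable));  // true
      }
    }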

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Fri May 16 18:49:39 2014
@@ -40,9 +40,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
@@ -57,8 +55,10 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.shims.HadoopShims.ByteBufferPoolShim;
 import org.apache.hadoop.hive.shims.HadoopShims.ZeroCopyReaderShim;
@@ -1021,13 +1021,14 @@ class RecordReaderImpl implements Record
     @Override
     Object next(Object previous) throws IOException {
       super.next(previous);
-      Timestamp result = null;
+      TimestampWritable result = null;
       if (valuePresent) {
         if (previous == null) {
-          result = new Timestamp(0);
+          result = new TimestampWritable();
         } else {
-          result = (Timestamp) previous;
+          result = (TimestampWritable) previous;
         }
+        Timestamp ts = new Timestamp(0);
         long millis = (data.next() + WriterImpl.BASE_TIMESTAMP) *
             WriterImpl.MILLIS_PER_SECOND;
         int newNanos = parseNanos(nanos.next());
@@ -1037,8 +1038,9 @@ class RecordReaderImpl implements Record
         } else {
           millis -= newNanos / 1000000;
         }
-        result.setTime(millis);
-        result.setNanos(newNanos);
+        ts.setTime(millis);
+        ts.setNanos(newNanos);
+        result.set(ts);
       }
       return result;
     }
@@ -1144,14 +1146,14 @@ class RecordReaderImpl implements Record
     @Override
     Object next(Object previous) throws IOException {
       super.next(previous);
-      Date result = null;
+      DateWritable result = null;
       if (valuePresent) {
         if (previous == null) {
-          result = new Date(0);
+          result = new DateWritable();
         } else {
-          result = (Date) previous;
+          result = (DateWritable) previous;
         }
-        result.setTime(DateWritable.daysToMillis((int) reader.next()));
+        result.set((int) reader.next());
       }
       return result;
     }
@@ -1223,10 +1225,16 @@ class RecordReaderImpl implements Record
     @Override
     Object next(Object previous) throws IOException {
       super.next(previous);
+      HiveDecimalWritable result = null;
       if (valuePresent) {
-        HiveDecimal dec = HiveDecimal.create(SerializationUtils.readBigInteger(valueStream),
-            (int) scaleStream.next());
-        return HiveDecimalUtils.enforcePrecisionScale(dec, precision, scale);
+        if (previous == null) {
+          result = new HiveDecimalWritable();
+        } else {
+          result = (HiveDecimalWritable) previous;
+        }
+        result.set(HiveDecimal.create(SerializationUtils.readBigInteger(valueStream),
+            (int) scaleStream.next()));
+        return HiveDecimalUtils.enforcePrecisionScale(result, precision, scale);
       }
       return null;
     }
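
Note the reuse pattern retained here: the reader still fills whatever Writable the caller hands back as "previous", so a consumer that needs to keep a value across rows should snapshot it (Writables copy on set and have copy constructors) rather than alias the scratch object. A small sketch of that caller-side contract, with assumed input values, not taken from the commit:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    public class SnapshotSketch {
      public static void main(String[] args) {
        TimestampWritable scratch = new TimestampWritable();  // reused per row
        TimestampWritable max = null;

        for (String s : new String[]{"1971-01-01 00:00:00",
                                     "1969-12-31 15:59:58.174"}) {
          scratch.set(Timestamp.valueOf(s));                  // reader: result.set(ts)
          if (max == null || scratch.compareTo(max) > 0) {
            max = new TimestampWritable(scratch);             // snapshot, no aliasing
          }
        }
        System.out.println(max.getTimestamp());  // 1971-01-01 00:00:00.0
      }
    }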

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java Fri May 16 18:49:39 2014
@@ -127,7 +127,7 @@ public class TestNewInputOutputFormat {
         ", , bye, {[{1, bye}, {2, sigh}]}, [{100000000, cat}," +
         " {-100000, in}, {1234, hat}]," +
         " {chani={5, chani}, mauddib={1, mauddib}}," +
-        " 2000-03-12 15:00:01.0, 12345678.6547457}");
+        " 2000-03-12 15:00:01, 12345678.6547457}");
     localFs.delete(outputPath, true);
   }
   

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewIntegerEncoding.java Fri May 16 18:49:39 2014
@@ -27,6 +27,7 @@ import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
@@ -1100,7 +1101,7 @@ public class TestNewIntegerEncoding {
     while (rows.hasNext()) {
       Object row = rows.next(null);
       assertEquals(tslist.get(idx++).getNanos(),
-          ((Timestamp) ((OrcStruct) row).getFieldValue(0)).getNanos());
+          ((TimestampWritable) ((OrcStruct) row).getFieldValue(0)).getNanos());
     }
   }
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Fri May 16 18:49:39 2014
@@ -22,7 +22,6 @@ import static junit.framework.Assert.ass
 import static junit.framework.Assert.assertNotNull;
 import static junit.framework.Assert.assertNull;
 import static junit.framework.Assert.assertTrue;
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ORC_ZEROCOPY;
 
 import java.io.File;
 import java.io.IOException;
@@ -41,13 +40,14 @@ import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -511,7 +511,7 @@ public class TestOrcFile {
     int idx = 0;
     while (rows.hasNext()) {
       Object row = rows.next(null);
-      assertEquals(tslist.get(idx++).getNanos(), ((Timestamp) row).getNanos());
+      assertEquals(tslist.get(idx++).getNanos(), ((TimestampWritable) row).getNanos());
     }
   }
 
@@ -1125,15 +1125,15 @@ public class TestOrcFile {
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
-    row.setFieldValue(0, Timestamp.valueOf("2000-03-12 15:00:00"));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
     HiveDecimal value = HiveDecimal.create("12345678.6547456");
-    row.setFieldValue(2, value);
+    row.setFieldValue(2, new HiveDecimalWritable(value));
     union.set((byte) 0, new IntWritable(42));
     writer.addRow(row);
-    row.setFieldValue(0, Timestamp.valueOf("2000-03-20 12:00:00.123456789"));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
     union.set((byte) 1, new Text("hello"));
     value = HiveDecimal.create("-5643.234");
-    row.setFieldValue(2, value);
+    row.setFieldValue(2, new HiveDecimalWritable(value));
     writer.addRow(row);
     row.setFieldValue(0, null);
     row.setFieldValue(1, null);
@@ -1145,13 +1145,13 @@ public class TestOrcFile {
     union.set((byte) 1, null);
     writer.addRow(row);
     union.set((byte) 0, new IntWritable(200000));
-    row.setFieldValue(0, Timestamp.valueOf("1900-01-01 00:00:00"));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("1900-01-01 00:00:00")));
     value = HiveDecimal.create("10000000000000000000");
-    row.setFieldValue(2, value);
+    row.setFieldValue(2, new HiveDecimalWritable(value));
     writer.addRow(row);
     Random rand = new Random(42);
     for(int i=1900; i < 2200; ++i) {
-      row.setFieldValue(0, Timestamp.valueOf(i + "-05-05 12:34:56." + i));
+      row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)));
       if ((i & 1) == 0) {
         union.set((byte) 0, new IntWritable(i*i));
       } else {
@@ -1159,7 +1159,7 @@ public class TestOrcFile {
       }
       value = HiveDecimal.create(new BigInteger(64, rand),
           rand.nextInt(18));
-      row.setFieldValue(2, value);
+      row.setFieldValue(2, new HiveDecimalWritable(value));
       if (maxValue.compareTo(value) < 0) {
         maxValue = value;
       }
@@ -1215,19 +1215,21 @@ public class TestOrcFile {
     inspector = reader.getObjectInspector();
     assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
         inspector.getTypeName());
-    assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
         row.getFieldValue(0));
     union = (OrcUnion) row.getFieldValue(1);
     assertEquals(0, union.getTag());
     assertEquals(new IntWritable(42), union.getObject());
-    assertEquals(HiveDecimal.create("12345678.6547456"), row.getFieldValue(2));
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("12345678.6547456")),
+        row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
     assertEquals(2, rows.getRowNumber());
-    assertEquals(Timestamp.valueOf("2000-03-20 12:00:00.123456789"),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
-    assertEquals(HiveDecimal.create("-5643.234"), row.getFieldValue(2));
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),
+        row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
     assertEquals(null, row.getFieldValue(0));
     assertEquals(null, row.getFieldValue(1));
@@ -1244,15 +1246,15 @@ public class TestOrcFile {
     assertEquals(null, union.getObject());
     assertEquals(null, row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
-    assertEquals(Timestamp.valueOf("1900-01-01 00:00:00"),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("1900-01-01 00:00:00")),
         row.getFieldValue(0));
     assertEquals(new IntWritable(200000), union.getObject());
-    assertEquals(HiveDecimal.create("10000000000000000000"),
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
                  row.getFieldValue(2));
     rand = new Random(42);
     for(int i=1900; i < 2200; ++i) {
       row = (OrcStruct) rows.next(row);
-      assertEquals(Timestamp.valueOf(i + "-05-05 12:34:56." + i),
+      assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
           row.getFieldValue(0));
       if ((i & 1) == 0) {
         assertEquals(0, union.getTag());
@@ -1261,8 +1263,8 @@ public class TestOrcFile {
         assertEquals(1, union.getTag());
         assertEquals(new Text(Integer.toString(i * i)), union.getObject());
       }
-      assertEquals(HiveDecimal.create(new BigInteger(64, rand),
-                                   rand.nextInt(18)), row.getFieldValue(2));
+      assertEquals(new HiveDecimalWritable(HiveDecimal.create(new BigInteger(64, rand),
+                                   rand.nextInt(18))), row.getFieldValue(2));
     }
     for(int i=0; i < 5000; ++i) {
       row = (OrcStruct) rows.next(row);
@@ -1279,11 +1281,11 @@ public class TestOrcFile {
     assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
     rows.seekToRow(1);
     row = (OrcStruct) rows.next(row);
-    assertEquals(Timestamp.valueOf("2000-03-20 12:00:00.123456789"),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
-    assertEquals(HiveDecimal.create("-5643.234"), row.getFieldValue(2));
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")), row.getFieldValue(2));
     rows.close();
   }
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java?rev=1595302&r1=1595301&r2=1595302&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java Fri May 16 18:49:39 2014
@@ -18,7 +18,14 @@
 
 package org.apache.hadoop.hive.ql.io.orc;
 
+import java.io.File;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.Random;
+
 import junit.framework.Assert;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -26,6 +33,7 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
@@ -34,12 +42,6 @@ import org.apache.hadoop.io.NullWritable
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.File;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Calendar;
-import java.util.Random;
-
 /**
 *
 * Class that tests ORC reader vectorization by comparing records that are
@@ -76,8 +78,8 @@ public class TestVectorizedORCReader {
     private final Date dt;
     private final HiveDecimal hd;
 
-    MyRecord(Boolean bo, Byte by, Integer i, Long l, Short s, Double d, String k, Timestamp t,
-             Date dt, HiveDecimal hd) {
+    MyRecord(Boolean bo, Byte by, Integer i, Long l, Short s, Double d, String k,
+        Timestamp t, Date dt, HiveDecimal hd) {
       this.bo = bo;
       this.by = by;
       this.i = i;
@@ -155,27 +157,28 @@ public class TestVectorizedORCReader {
             continue;
           }
           // Timestamps are stored as long, so convert and compare
-          if (a instanceof Timestamp) {
-            Timestamp t = ((Timestamp) a);
+          if (a instanceof TimestampWritable) {
+            TimestampWritable t = ((TimestampWritable) a);
            // Timestamp.getTime() is overridden as:
             // long time = super.getTime();
             // return (time + (nanos / 1000000));
-            Long timeInNanoSec = (t.getTime() * 1000000) + (t.getNanos() % 1000000);
+            Long timeInNanoSec = (t.getTimestamp().getTime() * 1000000)
+                + (t.getTimestamp().getNanos() % 1000000);
             Assert.assertEquals(true, timeInNanoSec.toString().equals(b.toString()));
             continue;
           }
 
           // Dates are stored as long, so convert and compare
-          if (a instanceof Date) {
-            Date adt = (Date) a;
-            Assert.assertEquals(adt.getTime(), DateWritable.daysToMillis((int) ((LongWritable) b).get()));
+          if (a instanceof DateWritable) {
+            DateWritable adt = (DateWritable) a;
+            Assert.assertEquals(adt.get().getTime(), DateWritable.daysToMillis((int) ((LongWritable) b).get()));
             continue;
           }
 
           // Decimals are stored as BigInteger, so convert and compare
-          if (a instanceof HiveDecimal) {
-            HiveDecimalWritable dec = (HiveDecimalWritable) b;
-            Assert.assertEquals(a, dec.getHiveDecimal());
+          if (a instanceof HiveDecimalWritable) {
+            HiveDecimalWritable dec = (HiveDecimalWritable) a;
+            Assert.assertEquals(dec, b);
           }
 
           if (null == a) {

Added: hive/trunk/ql/src/test/queries/clientpositive/orc_min_max.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/orc_min_max.q?rev=1595302&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/orc_min_max.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/orc_min_max.q Fri May 16 18:49:39 2014
@@ -0,0 +1,32 @@
+create table if not exists alltypes (
+ bo boolean,
+ ti tinyint,
+ si smallint,
+ i int,
+ bi bigint,
+ f float,
+ d double,
+ de decimal(10,3),
+ ts timestamp,
+ da date,
+ s string,
+ c char(5),
+ vc varchar(5),
+ m map<string, string>,
+ l array<int>,
+ st struct<c1:int, c2:string>
+) row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile;
+
+create table alltypes_orc like alltypes;
+alter table alltypes_orc set fileformat orc;
+
+load data local inpath '../../data/files/alltypes2.txt' overwrite into table alltypes;
+
+insert overwrite table alltypes_orc select * from alltypes;
+
+select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes;
+
+select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes_orc;
+

Added: hive/trunk/ql/src/test/results/clientpositive/orc_min_max.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/orc_min_max.q.out?rev=1595302&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/orc_min_max.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/orc_min_max.q.out Fri May 16 18:49:39 2014
@@ -0,0 +1,142 @@
+PREHOOK: query: create table if not exists alltypes (
+ bo boolean,
+ ti tinyint,
+ si smallint,
+ i int,
+ bi bigint,
+ f float,
+ d double,
+ de decimal(10,3),
+ ts timestamp,
+ da date,
+ s string,
+ c char(5),
+ vc varchar(5),
+ m map<string, string>,
+ l array<int>,
+ st struct<c1:int, c2:string>
+) row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table if not exists alltypes (
+ bo boolean,
+ ti tinyint,
+ si smallint,
+ i int,
+ bi bigint,
+ f float,
+ d double,
+ de decimal(10,3),
+ ts timestamp,
+ da date,
+ s string,
+ c char(5),
+ vc varchar(5),
+ m map<string, string>,
+ l array<int>,
+ st struct<c1:int, c2:string>
+) row format delimited fields terminated by '|'
+collection items terminated by ','
+map keys terminated by ':' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alltypes
+PREHOOK: query: create table alltypes_orc like alltypes
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table alltypes_orc like alltypes
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alltypes_orc
+PREHOOK: query: alter table alltypes_orc set fileformat orc
+PREHOOK: type: ALTERTABLE_FILEFORMAT
+PREHOOK: Input: default@alltypes_orc
+PREHOOK: Output: default@alltypes_orc
+POSTHOOK: query: alter table alltypes_orc set fileformat orc
+POSTHOOK: type: ALTERTABLE_FILEFORMAT
+POSTHOOK: Input: default@alltypes_orc
+POSTHOOK: Output: default@alltypes_orc
+PREHOOK: query: load data local inpath '../../data/files/alltypes2.txt' overwrite into table alltypes
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@alltypes
+POSTHOOK: query: load data local inpath '../../data/files/alltypes2.txt' overwrite into table alltypes
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@alltypes
+PREHOOK: query: insert overwrite table alltypes_orc select * from alltypes
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypes
+PREHOOK: Output: default@alltypes_orc
+POSTHOOK: query: insert overwrite table alltypes_orc select * from alltypes
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypes
+POSTHOOK: Output: default@alltypes_orc
+POSTHOOK: Lineage: alltypes_orc.bi SIMPLE [(alltypes)alltypes.FieldSchema(name:bi, type:bigint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.bo SIMPLE [(alltypes)alltypes.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.c SIMPLE [(alltypes)alltypes.FieldSchema(name:c, type:char(5), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.d SIMPLE [(alltypes)alltypes.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.da SIMPLE [(alltypes)alltypes.FieldSchema(name:da, type:date, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.de SIMPLE [(alltypes)alltypes.FieldSchema(name:de, type:decimal(10,3), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.f SIMPLE [(alltypes)alltypes.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.i SIMPLE [(alltypes)alltypes.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.l SIMPLE [(alltypes)alltypes.FieldSchema(name:l, type:array<int>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.m SIMPLE [(alltypes)alltypes.FieldSchema(name:m, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.s SIMPLE [(alltypes)alltypes.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.si SIMPLE [(alltypes)alltypes.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.st SIMPLE [(alltypes)alltypes.FieldSchema(name:st, type:struct<c1:int,c2:string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ti SIMPLE [(alltypes)alltypes.FieldSchema(name:ti, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ts SIMPLE [(alltypes)alltypes.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc SIMPLE [(alltypes)alltypes.FieldSchema(name:vc, type:varchar(5), comment:null), ]
+PREHOOK: query: select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypes
+#### A masked pattern was here ####
+POSTHOOK: query: select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypes
+#### A masked pattern was here ####
+POSTHOOK: Lineage: alltypes_orc.bi SIMPLE [(alltypes)alltypes.FieldSchema(name:bi, type:bigint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.bo SIMPLE [(alltypes)alltypes.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.c SIMPLE [(alltypes)alltypes.FieldSchema(name:c, type:char(5), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.d SIMPLE [(alltypes)alltypes.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.da SIMPLE [(alltypes)alltypes.FieldSchema(name:da, type:date, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.de SIMPLE [(alltypes)alltypes.FieldSchema(name:de, type:decimal(10,3), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.f SIMPLE [(alltypes)alltypes.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.i SIMPLE [(alltypes)alltypes.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.l SIMPLE [(alltypes)alltypes.FieldSchema(name:l, type:array<int>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.m SIMPLE [(alltypes)alltypes.FieldSchema(name:m, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.s SIMPLE [(alltypes)alltypes.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.si SIMPLE [(alltypes)alltypes.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.st SIMPLE [(alltypes)alltypes.FieldSchema(name:st, type:struct<c1:int,c2:string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ti SIMPLE [(alltypes)alltypes.FieldSchema(name:ti, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ts SIMPLE [(alltypes)alltypes.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc SIMPLE [(alltypes)alltypes.FieldSchema(name:vc, type:varchar(5), comment:null), ]
+false	true	10	20	100	200	1000	2000	10000	20000	4.0	8.0	20.0	40.0	2.222	4.222	1969-12-31 15:59:58.174	1970-12-31 15:59:58.174	1970-01-01	1971-01-01	abcd	string	hello	world	hello	world
+PREHOOK: query: select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes_orc
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypes_orc
+#### A masked pattern was here ####
+POSTHOOK: query: select min(bo), max(bo), min(ti), max(ti), min(si), max(si), min(i), max(i), min(bi), max(bi), min(f), max(f), min(d), max(d), min(de), max(de), min(ts), max(ts), min(da), max(da), min(s), max(s), min(c), max(c), min(vc), max(vc) from alltypes_orc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypes_orc
+#### A masked pattern was here ####
+POSTHOOK: Lineage: alltypes_orc.bi SIMPLE [(alltypes)alltypes.FieldSchema(name:bi, type:bigint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.bo SIMPLE [(alltypes)alltypes.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.c SIMPLE [(alltypes)alltypes.FieldSchema(name:c, type:char(5), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.d SIMPLE [(alltypes)alltypes.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.da SIMPLE [(alltypes)alltypes.FieldSchema(name:da, type:date, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.de SIMPLE [(alltypes)alltypes.FieldSchema(name:de, type:decimal(10,3), comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.f SIMPLE [(alltypes)alltypes.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.i SIMPLE [(alltypes)alltypes.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.l SIMPLE [(alltypes)alltypes.FieldSchema(name:l, type:array<int>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.m SIMPLE [(alltypes)alltypes.FieldSchema(name:m, type:map<string,string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.s SIMPLE [(alltypes)alltypes.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.si SIMPLE [(alltypes)alltypes.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.st SIMPLE [(alltypes)alltypes.FieldSchema(name:st, type:struct<c1:int,c2:string>, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ti SIMPLE [(alltypes)alltypes.FieldSchema(name:ti, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.ts SIMPLE [(alltypes)alltypes.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc SIMPLE [(alltypes)alltypes.FieldSchema(name:vc, type:varchar(5), comment:null), ]
+false	true	10	20	100	200	1000	2000	10000	20000	4.0	8.0	20.0	40.0	2.222	4.222	1969-12-31 15:59:58.174	1970-12-31 15:59:58.174	1970-01-01	1971-01-01	abcd	string	hello	world	hello	world