You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by sd...@apache.org on 2011/08/12 21:21:38 UTC

svn commit: r1157222 [3/3] - in /hive/trunk: ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/udf/ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/ ql/src/test/queries/clie...

Added: hive/trunk/ql/src/test/results/clientpositive/timestamp_comparison.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/timestamp_comparison.q.out?rev=1157222&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/timestamp_comparison.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/timestamp_comparison.q.out Fri Aug 12 19:21:36 2011
@@ -0,0 +1,99 @@
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > 
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-32_519_8364837448470167055/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) > 
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-32_519_8364837448470167055/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-35_762_374402174619403002/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-35_762_374402174619403002/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) = 
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-38_788_5635730866062139704/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) = 
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-38_788_5635730866062139704/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <>
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-41_803_7173891637306783676/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <>
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-41_803_7173891637306783676/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-44_748_5479207213327992908/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-44_748_5479207213327992908/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <=
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-47_676_4858015920608435969/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <=
+  cast('2011-05-06 07:08:09' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-47_676_4858015920608435969/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-50_625_9121503583543935508/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) >=
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-50_625_9121503583543935508/-mr-10000
+false
+PREHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-53_594_2460043482331880840/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09' as timestamp) <
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-53_594_2460043482331880840/-mr-10000
+true
+PREHOOK: query: select cast('2011-05-06 07:08:09.1000' as timestamp) =
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-56_524_8870776672820297370/-mr-10000
+POSTHOOK: query: select cast('2011-05-06 07:08:09.1000' as timestamp) =
+  cast('2011-05-06 07:08:09.1' as timestamp) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-07-06_18-09-56_524_8870776672820297370/-mr-10000
+true

Added: hive/trunk/ql/src/test/results/clientpositive/timestamp_udf.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/timestamp_udf.q.out?rev=1157222&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/timestamp_udf.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/timestamp_udf.q.out Fri Aug 12 19:21:36 2011
@@ -0,0 +1,194 @@
+PREHOOK: query: drop table timestamp_udf
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_udf
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table timestamp_udf_string
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table timestamp_udf_string
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table timestamp_udf (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_udf (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_udf
+PREHOOK: query: create table timestamp_udf_string (t string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table timestamp_udf_string (t string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_udf_string
+PREHOOK: query: from src
+  insert overwrite table timestamp_udf 
+    select '2011-05-06 07:08:09.1234567' limit 1
+  insert overwrite table timestamp_udf_string
+    select '2011-05-06 07:08:09.1234567' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_udf
+PREHOOK: Output: default@timestamp_udf_string
+POSTHOOK: query: from src
+  insert overwrite table timestamp_udf 
+    select '2011-05-06 07:08:09.1234567' limit 1
+  insert overwrite table timestamp_udf_string
+    select '2011-05-06 07:08:09.1234567' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Output: default@timestamp_udf_string
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+PREHOOK: query: -- Test UDFs with Timestamp input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+    weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+  from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-23_171_7947076159754995294/-mr-10000
+POSTHOOK: query: -- Test UDFs with Timestamp input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t),
+    weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+  from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-23_171_7947076159754995294/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
+PREHOOK: query: select date_add(t, 5), date_sub(t, 10)
+  from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-26_332_7736225081611226782/-mr-10000
+POSTHOOK: query: select date_add(t, 5), date_sub(t, 10)
+  from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-26_332_7736225081611226782/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-11	2011-04-26
+PREHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+  from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-29_301_1140620362250414035/-mr-10000
+POSTHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+  from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-29_301_1140620362250414035/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+0	3333	-3333
+PREHOOK: query: select from_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-32_281_8587314075270802684/-mr-10000
+POSTHOOK: query: select from_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-32_281_8587314075270802684/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 02:08:09.2464567
+PREHOOK: query: select to_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-35_219_3365806571591781180/-mr-10000
+POSTHOOK: query: select to_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-35_219_3365806571591781180/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 12:08:09.2464567
+PREHOOK: query: -- Test UDFs with string input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t), 
+    weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+  from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-38_187_2248272713610212604/-mr-10000
+POSTHOOK: query: -- Test UDFs with string input
+select unix_timestamp(t), year(t), month(t), day(t), dayofmonth(t), 
+    weekofyear(t), hour(t), minute(t), second(t), to_date(t)
+  from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-38_187_2248272713610212604/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
+PREHOOK: query: select date_add(t, 5), date_sub(t, 10)  from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-41_233_7026658402237219829/-mr-10000
+POSTHOOK: query: select date_add(t, 5), date_sub(t, 10)  from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-41_233_7026658402237219829/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-11	2011-04-26
+PREHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+  from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-44_229_1408313723115883676/-mr-10000
+POSTHOOK: query: select datediff(t, t), datediff(t, '2002-03-21'), datediff('2002-03-21', t)
+  from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-44_229_1408313723115883676/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+0	3333	-3333
+PREHOOK: query: select from_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-47_192_7770480524717901723/-mr-10000
+POSTHOOK: query: select from_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-47_192_7770480524717901723/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 02:08:09.2464567
+PREHOOK: query: select to_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-50_086_9149577780206626620/-mr-10000
+POSTHOOK: query: select to_utc_timestamp(t, 'America/Chicago')
+  from timestamp_udf_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-08-10_11-29-50_086_9149577780206626620/-mr-10000
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+2011-05-06 12:08:09.2464567
+PREHOOK: query: drop table timestamp_udf
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: default@timestamp_udf
+POSTHOOK: query: drop table timestamp_udf
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []
+PREHOOK: query: drop table timestamp_udf_string
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_udf_string
+PREHOOK: Output: default@timestamp_udf_string
+POSTHOOK: query: drop table timestamp_udf_string
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_udf_string
+POSTHOOK: Output: default@timestamp_udf_string
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+POSTHOOK: Lineage: timestamp_udf_string.t SIMPLE []

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Fri Aug 12 19:21:36 2011
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 
 /**
  * SerDeUtils.
@@ -255,6 +256,13 @@ public final class SerDeUtils {
           sb.append('"');
           break;
         }
+        case TIMESTAMP: {
+          sb.append('"');
+          sb.append(((TimestampObjectInspector) poi)
+              .getPrimitiveWritableObject(o));
+          sb.append('"');
+          break;
+        }
         default:
           throw new RuntimeException("Unknown primitive type: "
               + poi.getPrimitiveCategory());

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Fri Aug 12 19:21:36 2011
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -52,6 +53,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -310,6 +312,17 @@ public class BinarySortableSerDe impleme
         }
         return r;
       }
+      case TIMESTAMP:
+        TimestampWritable t = (reuse == null ? new TimestampWritable() :
+            (TimestampWritable) reuse);
+        byte[] bytes = new byte[8];
+
+        for (int i = 0; i < bytes.length; i++) {
+          bytes[i] = buffer.read(invert);
+        }
+        t.setBinarySortable(bytes, 0);
+        return t;
+
       default: {
         throw new RuntimeException("Unrecognized type: "
             + ptype.getPrimitiveCategory());
@@ -539,6 +552,15 @@ public class BinarySortableSerDe impleme
         buffer.write((byte) 0, invert);
         return;
       }
+      case TIMESTAMP: {
+        TimestampObjectInspector toi = (TimestampObjectInspector) poi;
+        TimestampWritable t = toi.getPrimitiveWritableObject(o);
+        byte[] data = t.getBinarySortable();
+        for (int i = 0; i < data.length; i++) {
+          buffer.write(data[i], invert);
+        }
+        return;
+      }
       default: {
         throw new RuntimeException("Unrecognized type: "
             + poi.getPrimitiveCategory());

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,519 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+/**
+ * TimestampWritable
+ * Writable equivalent of java.sql.Timestamp
+ *
+ * Timestamps are of the format
+ *    YYYY-MM-DD HH:MM:SS.[fff...]
+ *
+ * We encode Unix timestamp in seconds in 4 bytes, using the MSB to signify
+ * whether the timestamp has a fractional portion.
+ *
+ * The fractional portion is reversed, and encoded as a VInt
+ * so timestamps with less precision use fewer bytes.
+ *
+ *      0.1    -> 1
+ *      0.01   -> 10
+ *      0.001  -> 100
+ *
+ */
+public class TimestampWritable implements WritableComparable<TimestampWritable> {
+  static final private Log LOG = LogFactory.getLog(TimestampWritable.class);
+
+  static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
+
+  private static final int NO_DECIMAL_MASK = 0x7FFFFFFF;
+  private static final int HAS_DECIMAL_MASK = 0x80000000;
+
+  private static final DateFormat dateFormat =
+    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+  private Timestamp timestamp = new Timestamp(0);
+
+  /**
+   * true if data is stored in timestamp field rather than byte arrays.
+   *      allows for lazy conversion to bytes when necessary
+   * false otherwise
+   */
+  private boolean bytesEmpty;
+  private boolean timestampEmpty;
+
+  /* Allow use of external byte[] for efficiency */
+  private byte[] currentBytes;
+  private final byte[] internalBytes = new byte[9];
+  private byte[] externalBytes;
+  private int offset;
+
+  /* Reused to read VInts */
+  static private final VInt vInt = new VInt();
+
+  /* Constructors */
+  public TimestampWritable() {
+    Arrays.fill(internalBytes, (byte) 0x0);
+    bytesEmpty = false;
+    currentBytes = internalBytes;
+    offset = 0;
+
+    clearTimestamp();
+  }
+
+  public TimestampWritable(byte[] bytes, int offset) {
+    set(bytes, offset);
+  }
+
+  public TimestampWritable(TimestampWritable t) {
+    this(t.getBytes(), 0);
+  }
+
+  public TimestampWritable(Timestamp t) {
+    set(t);
+  }
+
+  public void set(byte[] bytes, int offset) {
+    externalBytes = bytes;
+    this.offset = offset;
+    bytesEmpty = false;
+    currentBytes = externalBytes;
+
+    clearTimestamp();
+  }
+
+  public void set(Timestamp t) {
+    if (t == null) {
+      timestamp.setTime(0);
+      timestamp.setNanos(0);
+      return;
+    }
+    this.timestamp = t;
+    bytesEmpty = true;
+    timestampEmpty = false;
+  }
+
+  public void set(TimestampWritable t) {
+    if (t.bytesEmpty) {
+      set(t.getTimestamp());
+      return;
+    }
+    if (t.currentBytes == t.externalBytes) {
+      set(t.currentBytes, t.offset);
+    } else {
+      set(t.currentBytes, 0);
+    }
+  }
+
+  private void clearTimestamp() {
+    timestampEmpty = true;
+  }
+
+  public void writeToByteStream(Output byteStream) {
+    checkBytes();
+    byteStream.write(currentBytes, offset, getTotalLength());
+  }
+
+  /**
+   *
+   * @return seconds corresponding to this TimestampWritable
+   */
+  public int getSeconds() {
+    if (bytesEmpty) {
+      return (int) (timestamp.getTime() / 1000);
+    }
+    return TimestampWritable.getSeconds(currentBytes, offset);
+  }
+
+  /**
+   *
+   * @return nanoseconds in this TimestampWritable
+   */
+  public int getNanos() {
+    if (!timestampEmpty) {
+      return timestamp.getNanos();
+    }
+
+    return TimestampWritable.getNanos(currentBytes, offset+4);
+  }
+
+  /**
+   *
+   * @return length of serialized TimestampWritable data
+   */
+  private int getTotalLength() {
+    return 4 + getDecimalLength();
+  }
+
+  /**
+   *
+   * @return number of bytes the variable length decimal takes up
+   */
+  private int getDecimalLength() {
+    checkBytes();
+    return WritableUtils.decodeVIntSize(currentBytes[offset+4]);
+  }
+
+  public Timestamp getTimestamp() {
+    if (timestampEmpty) {
+      populateTimestamp();
+    }
+    return timestamp;
+  }
+
+  /**
+   * Used to create copies of objects
+   * @return a copy of the internal TimestampWritable byte[]
+   */
+  public byte[] getBytes() {
+    checkBytes();
+
+    int len = getTotalLength();
+    byte[] b = new byte[len];
+
+    System.arraycopy(currentBytes, offset, b, 0, len);
+    return b;
+  }
+
+  /**
+   * @return byte[] representation of TimestampWritable that is binary
+   * sortable (4 byte seconds, 4 bytes for nanoseconds)
+   */
+  public byte[] getBinarySortable() {
+    byte[] b = new byte[8];
+    int nanos = getNanos();
+    int seconds = HAS_DECIMAL_MASK | getSeconds();
+    intToBytes(seconds, b, 0);
+    intToBytes(nanos, b, 4);
+    return b;
+  }
+
+  /**
+   * Given a byte[] that has binary sortable data, initialize the internal
+   * structures to hold that data
+   * @param bytes
+   * @param offset
+   */
+  public void setBinarySortable(byte[] bytes, int offset) {
+    int seconds = bytesToInt(bytes, offset);
+    int nanos = bytesToInt(bytes, offset+4);
+    if (nanos == 0) {
+      seconds &= NO_DECIMAL_MASK;
+    } else {
+      seconds |= HAS_DECIMAL_MASK;
+    }
+    intToBytes(seconds, internalBytes, 0);
+    setNanosBytes(nanos, internalBytes, 4);
+    currentBytes = internalBytes;
+    this.offset = 0;
+  }
+
+  /**
+   * The data of TimestampWritable can be stored either in a byte[]
+   * or in a Timestamp object. Calling this method ensures that the byte[]
+   * is populated from the Timestamp object if previously empty.
+   */
+  private void checkBytes() {
+    if (bytesEmpty) {
+      // Populate byte[] from Timestamp
+      convertTimestampToBytes(timestamp, internalBytes, 0);
+      offset = 0;
+      currentBytes = internalBytes;
+      bytesEmpty = false;
+    }
+  }
+
+  /**
+   *
+   * @return double representation of the timestamp, accurate to nanoseconds
+   */
+  public double getDouble() {
+    double seconds, nanos;
+    if (bytesEmpty) {
+      seconds = timestamp.getTime() / 1000;
+      nanos = timestamp.getNanos();
+    } else {
+      seconds = getSeconds();
+      nanos = getNanos();
+    }
+    return seconds + ((double) nanos) / 1000000000;
+  }
+
+
+
+  public void readFields(DataInput in) throws IOException {
+    in.readFully(internalBytes, 0, 4);
+    if (TimestampWritable.hasDecimal(internalBytes[0])) {
+      in.readFully(internalBytes, 4, 1);
+      int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
+      in.readFully(internalBytes, 5, len-1);
+    }
+    currentBytes = internalBytes;
+    this.offset = 0;
+  }
+
+  public void write(OutputStream out) throws IOException {
+    checkBytes();
+    out.write(currentBytes, offset, getTotalLength());
+  }
+
+  public void write(DataOutput out) throws IOException {
+    write((OutputStream) out);
+  }
+
+  public int compareTo(TimestampWritable t) {
+    checkBytes();
+    int s1 = this.getSeconds();
+    int s2 = t.getSeconds();
+    if (s1 == s2) {
+      int n1 = this.getNanos();
+      int n2 = t.getNanos();
+      if (n1 == n2) {
+        return 0;
+      }
+      return n1 - n2;
+    } else {
+      return s1 - s2;
+    }
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return compareTo((TimestampWritable) o) == 0;
+  }
+
+  @Override
+  public String toString() {
+    if (timestampEmpty) {
+      populateTimestamp();
+    }
+
+    String timestampString = timestamp.toString();
+    if (timestampString.length() > 19) {
+      if (timestampString.length() == 21) {
+        if (timestampString.substring(19).compareTo(".0") == 0) {
+          return dateFormat.format(timestamp);
+        }
+      }
+      return dateFormat.format(timestamp) + timestampString.substring(19);
+    }
+
+    return dateFormat.format(timestamp);
+  }
+
+  @Override
+  public int hashCode() {
+    long seconds = getSeconds();
+    seconds <<= 32;
+    seconds |= getNanos();
+    return (int) ((seconds >>> 32) ^ seconds);
+  }
+
+  private void populateTimestamp() {
+    long seconds = getSeconds();
+    int nanos = getNanos();
+    timestamp.setTime(seconds * 1000);
+    timestamp.setNanos(nanos);
+  }
+
+  /** Static methods **/
+
+  /**
+   * Gets seconds stored as integer at bytes[offset]
+   * @param bytes
+   * @param offset
+   * @return
+   */
+  public static int getSeconds(byte[] bytes, int offset) {
+    return NO_DECIMAL_MASK & bytesToInt(bytes, offset);
+  }
+
+  public static int getNanos(byte[] bytes, int offset) {
+    LazyBinaryUtils.readVInt(bytes, offset, vInt);
+    int val = vInt.value;
+    int len = (int) Math.floor(Math.log10(val)) + 1;
+
+    // Reverse the value
+    int tmp = 0;
+    while (val != 0) {
+      tmp *= 10;
+      tmp += val % 10;
+      val /= 10;
+    }
+    val = tmp;
+
+    if (len < 9) {
+      val *= Math.pow(10, 9 - len);
+    }
+    return val;
+  }
+
+  /**
+   * Writes a Timestamp's serialized value to byte array b at the given offset
+   * @param t
+   * @param b
+   */
+  public static void convertTimestampToBytes(Timestamp t, byte[] b,
+      int offset) {
+    if (b.length < 9) {
+      LOG.error("byte array too short");
+    }
+    long millis = t.getTime();
+    int nanos = t.getNanos();
+
+    boolean hasDecimal = setNanosBytes(nanos, b, offset+4);
+    setSecondsBytes(millis, b, offset, hasDecimal);
+  }
+
+  /**
+   * Given an integer representing seconds, write its serialized
+   * value to the byte array b at offset
+   * @param millis
+   * @param b
+   * @param offset
+   * @param hasDecimal
+   */
+  private static void setSecondsBytes(long millis, byte[] b, int offset, boolean hasDecimal) {
+    int seconds = (int) (millis / 1000);
+
+    if (!hasDecimal) {
+      seconds &= NO_DECIMAL_MASK;
+    } else {
+      seconds |= HAS_DECIMAL_MASK;
+    }
+
+    intToBytes(seconds, b, offset);
+  }
+
+  /**
+   * Given an integer representing nanoseconds, write its serialized
+   * value to the byte array b at offset
+   *
+   * @param nanos
+   * @param b
+   * @param offset
+   * @return
+   */
+  private static boolean setNanosBytes(int nanos, byte[] b, int offset) {
+    int decimal = 0;
+    if (nanos != 0) {
+      int counter = 0;
+      while (counter < 9) {
+        decimal *= 10;
+        decimal += nanos % 10;
+        nanos /= 10;
+        counter++;
+      }
+    }
+
+    LazyBinaryUtils.writeVLongToByteArray(b, offset, decimal);
+    return decimal != 0;
+  }
+
+  /**
+   * Interprets a float as a unix timestamp and returns a Timestamp object
+   * @param f
+   * @return
+   */
+  public static Timestamp floatToTimestamp(float f) {
+    return doubleToTimestamp((double) f);
+  }
+
+  public static Timestamp doubleToTimestamp(double f) {
+    long seconds = (long) f;
+
+    // We must ensure the exactness of the double's fractional portion.
+    // 0.6 as the fraction part will be converted to 0.59999... and
+    // significantly reduce the savings from binary serialization
+    BigDecimal bd = new BigDecimal(String.valueOf(f));
+    bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
+    int nanos = bd.intValue();
+
+    // Convert to millis
+    long millis = seconds * 1000;
+    Timestamp t = new Timestamp(millis);
+
+    // Set remaining fractional portion to nanos
+    t.setNanos(nanos);
+    return t;
+  }
+
+  public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
+    t.setTime(((long) TimestampWritable.getSeconds(bytes, offset)) * 1000);
+    t.setNanos(TimestampWritable.getNanos(bytes, offset+4));
+  }
+
+  public static Timestamp createTimestamp(byte[] bytes, int offset) {
+    Timestamp t = new Timestamp(0);
+    TimestampWritable.setTimestamp(t, bytes, offset);
+    return t;
+  }
+
+  /**
+   *
+   * @param b first byte in an encoded TimestampWritable
+   * @return true if it has a decimal portion, false otherwise
+   */
+  public static boolean hasDecimal(byte b) {
+    return (b >> 7) != 0;
+  }
+
+  /**
+   * Writes <code>value</code> into <code>dest</code> at <code>offset</code>
+   * @param value
+   * @param dest
+   * @param offset
+   */
+  private static void intToBytes(int value, byte[] dest, int offset) {
+    dest[offset] = (byte) ((value >> 24) & 0xFF);
+    dest[offset+1] = (byte) ((value >> 16) & 0xFF);
+    dest[offset+2] = (byte) ((value >> 8) & 0xFF);
+    dest[offset+3] = (byte) (value & 0xFF);
+  }
+
+  /**
+   *
+   * @param bytes
+   * @param offset
+   * @return integer represented by the four bytes in <code>bytes</code>
+   *  beginning at <code>offset</code>
+   */
+  private static int bytesToInt(byte[] bytes, int offset) {
+    return ((0xFF & bytes[offset]) << 24)
+        | ((0xFF & bytes[offset+1]) << 16)
+        | ((0xFF & bytes[offset+2]) << 8)
+        | (0xFF & bytes[offset+3]);
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Fri Aug 12 19:21:36 2011
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -75,6 +76,8 @@ public final class LazyFactory {
       return new LazyDouble((LazyDoubleObjectInspector) oi);
     case STRING:
       return new LazyString((LazyStringObjectInspector) oi);
+    case TIMESTAMP:
+      return new LazyTimestamp((LazyTimestampObjectInspector) oi);
     default:
       throw new RuntimeException("Internal error: no LazyObject for " + p);
     }

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.sql.Timestamp;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
+
+/**
+ *
+ * LazyTimestamp.
+ * Serializes and deserializes a Timestamp in the JDBC timestamp format
+ *
+ *    YYYY-MM-DD HH:MM:SS.[fff...]
+ *
+ */
+public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, TimestampWritable> {
+  static final private Log LOG = LogFactory.getLog(LazyTimestamp.class);
+
+  public LazyTimestamp(LazyTimestampObjectInspector oi) {
+    super(oi);
+    data = new TimestampWritable();
+  }
+
+  public LazyTimestamp(LazyTimestamp copy) {
+    super(copy);
+    data = new TimestampWritable(copy.data);
+  }
+
+  /**
+   * Initializes LazyTimestamp object by interpreting the input bytes
+   * as a JDBC timestamp string.
+   *
+   * @param bytes buffer holding the text to parse
+   * @param start offset of the first byte of the field
+   * @param length number of bytes in the field
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    String s = null;
+    try {
+      s = new String(bytes.getData(), start, length, "US-ASCII");
+    } catch (UnsupportedEncodingException e) {
+      LOG.error(e);
+      s = "";
+    }
+
+    Timestamp t = null;
+    if (s.compareTo("NULL") != 0) {
+      try {
+        t = Timestamp.valueOf(s);
+      } catch (IllegalArgumentException e) {
+        // Malformed timestamp text is treated like the NULL marker instead
+        // of aborting deserialization of the entire row.
+        LOG.error("Invalid timestamp string: " + s);
+      }
+    }
+    data.set(t);
+  }
+
+  private static final String nullTimestamp = "NULL";
+
+  /**
+   * Writes a Timestamp in JDBC timestamp format to the output stream
+   * @param out
+   *          The output stream
+   * @param i
+   *          The Timestamp to write
+   * @throws IOException
+   */
+  public static void writeUTF8(OutputStream out, TimestampWritable i)
+      throws IOException {
+    if (i == null) {
+      // Serialize as time 0
+      out.write(TimestampWritable.nullBytes);
+    } else {
+      out.write(i.toString().getBytes("US-ASCII"));
+    }
+  }
+
+  @Override
+  public TimestampWritable getWritableObject() {
+    return data;
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Fri Aug 12 19:21:36 2011
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.Text;
 
@@ -48,7 +49,7 @@ public final class LazyUtils {
 
   /**
    * Returns the digit represented by character b.
-   * 
+   *
    * @param b
    *          The ascii code of the character
    * @param radix
@@ -102,7 +103,7 @@ public final class LazyUtils {
 
   /**
    * Convert a UTF-8 byte array to String.
-   * 
+   *
    * @param bytes
    *          The byte[] containing the UTF-8 String.
    * @param start
@@ -124,7 +125,7 @@ public final class LazyUtils {
 
   /**
    * Write the bytes with special characters escaped.
-   * 
+   *
    * @param escaped
    *          Whether the data should be written out in an escaped way.
    * @param escapeChar
@@ -158,7 +159,7 @@ public final class LazyUtils {
   /**
    * Write out the text representation of a Primitive Object to a UTF8 byte
    * stream.
-   * 
+   *
    * @param out
    *          The UTF8 byte OutputStream
    * @param o
@@ -215,6 +216,11 @@ public final class LazyUtils {
           needsEscape);
       break;
     }
+    case TIMESTAMP: {
+      LazyTimestamp.writeUTF8(out,
+          ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o));
+      break;
+    }
     default: {
       throw new RuntimeException("Hive internal error.");
     }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java Fri Aug 12 19:21:36 2011
@@ -53,6 +53,8 @@ public final class LazyPrimitiveObjectIn
       new LazyDoubleObjectInspector();
   public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR =
       new LazyVoidObjectInspector();
+  public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR =
+      new LazyTimestampObjectInspector();
 
   static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector =
       new HashMap<ArrayList<Object>, LazyStringObjectInspector>();
@@ -93,6 +95,8 @@ public final class LazyPrimitiveObjectIn
       return getLazyStringObjectInspector(escaped, escapeChar);
     case VOID:
       return LAZY_VOID_OBJECT_INSPECTOR;
+    case TIMESTAMP:
+      return LAZY_TIMESTAMP_OBJECT_INSPECTOR;
     default:
       throw new RuntimeException("Internal error: Cannot find ObjectInspector "
           + " for " + primitiveCategory);

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+
+/**
+ * ObjectInspector for LazyTimestamp objects, exposing them through the
+ * TimestampObjectInspector interface.
+ */
+public class LazyTimestampObjectInspector
+    extends AbstractPrimitiveLazyObjectInspector<TimestampWritable>
+    implements TimestampObjectInspector {
+
+  protected LazyTimestampObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+  }
+
+  public Object copyObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    return new LazyTimestamp((LazyTimestamp) o);
+  }
+
+  public Timestamp getPrimitiveJavaObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    return ((LazyTimestamp) o).getWritableObject().getTimestamp();
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java Fri Aug 12 19:21:36 2011
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableVoidObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
@@ -69,6 +70,8 @@ public final class LazyBinaryFactory {
       return new LazyBinaryString((WritableStringObjectInspector) oi);
     case VOID: // for NULL
       return new LazyBinaryVoid((WritableVoidObjectInspector) oi);
+    case TIMESTAMP:
+      return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi);
     default:
       throw new RuntimeException("Internal error: no LazyBinaryObject for " + p);
     }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Fri Aug 12 19:21:36 2011
@@ -29,18 +29,19 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
@@ -49,6 +50,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -218,8 +220,8 @@ public class LazyBinarySerDe implements 
    *          the struct object to serialize
    * @param objInspector
    *          the struct object inspector
-   * @param warnedOnceNullMapKey a boolean indicating whether a warning 
-   *          has been issued once already when encountering null map keys 
+   * @param warnedOnceNullMapKey a boolean indicating whether a warning
+   *          has been issued once already when encountering null map keys
    * @return a boolean indicating whether a warning for null map keys has been issued
    *          once already
    */
@@ -237,6 +239,7 @@ public class LazyBinarySerDe implements 
     int size = fields.size();
     int lasti = 0;
     byte nullByte = 0;
+
     for (int i = 0; i < size; i++) {
       // set bit to 1 if a field is not null
       if (null != soi.getStructFieldData(obj, fields.get(i))) {
@@ -270,8 +273,8 @@ public class LazyBinarySerDe implements 
    *          the object inspector
    * @param skipLengthPrefix a boolean indicating whether length prefix is
    *          needed for list/map/struct
-   * @param warnedOnceNullMapKey a boolean indicating whether a warning 
-   *          has been issued once already when encountering null map keys 
+   * @param warnedOnceNullMapKey a boolean indicating whether a warning
+   *          has been issued once already when encountering null map keys
    * @return a boolean indicating whether a warning for null map keys has been issued
    *          once already
    */
@@ -355,6 +358,12 @@ public class LazyBinarySerDe implements 
         byteStream.write(data, 0, length);
         return warnedOnceNullMapKey;
       }
+      case TIMESTAMP: {
+        TimestampObjectInspector toi = (TimestampObjectInspector) poi;
+        TimestampWritable t = toi.getPrimitiveWritableObject(obj);
+        t.writeToByteStream(byteStream);
+        return warnedOnceNullMapKey;
+      }
       default: {
         throw new RuntimeException("Unrecognized type: "
             + poi.getPrimitiveCategory());
@@ -398,7 +407,7 @@ public class LazyBinarySerDe implements 
 
       // 4/ write element by element from the list
       for (int eid = 0; eid < size; eid++) {
-        warnedOnceNullMapKey = serialize(byteStream, loi.getListElement(obj, eid), eoi, 
+        warnedOnceNullMapKey = serialize(byteStream, loi.getListElement(obj, eid), eoi,
             false, warnedOnceNullMapKey);
       }
 
@@ -432,7 +441,7 @@ public class LazyBinarySerDe implements 
         byteStream.write((byte) 0);
         mapStart = byteStream.getCount();
       }
-      
+
       // 2/ write the size of the map which is a VInt
       int size = map.size();
       LazyBinaryUtils.writeVInt(byteStream, size);

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+
+/**
+ * LazyBinaryTimestamp
+ * A LazyBinaryObject that decodes a java.sql.Timestamp encoded in
+ * 4 to 9 bytes (see TimestampWritable for the binary layout).
+ *
+ */
+public class LazyBinaryTimestamp extends
+    LazyBinaryPrimitive<WritableTimestampObjectInspector, TimestampWritable> {
+  static final Log LOG = LogFactory.getLog(LazyBinaryTimestamp.class);
+
+  LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
+    super(oi);
+    data = new TimestampWritable();
+  }
+
+  LazyBinaryTimestamp(LazyBinaryTimestamp copy) {
+    super(copy);
+    data = new TimestampWritable(copy.data);
+  }
+
+  /**
+   * Initializes the LazyBinaryTimestamp object from serialized bytes.
+   *
+   * @param bytes buffer holding the serialized timestamp
+   * @param start index of the first byte of the value
+   * @param length total encoded length: 4 when there is no fractional
+   *    part, otherwise 4 plus the size of the VInt holding the
+   *    (reversed) decimal portion
+   */
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    // TimestampWritable.set() determines the record length from the
+    // encoding itself, so the length parameter is not needed here.
+    data.set(bytes.getData(), start);
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java Fri Aug 12 19:21:36 2011
@@ -24,6 +24,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -133,6 +134,8 @@ public final class LazyBinaryUtils {
    * bytes are used to store the size. So the offset is 4 and the size is
   * computed by concatenating the first four bytes together. The first four bytes
    * are defined with respect to the offset in the bytes arrays.
+   * For timestamp, if the first bit is 0, the record length is 4, otherwise
+   * a VInt begins at the 5th byte and its length is added to 4.
    *
    * @param objectInspector
    *          object inspector of the field
@@ -186,6 +189,13 @@ public final class LazyBinaryUtils {
         recordInfo.elementOffset = vInt.length;
         recordInfo.elementSize = vInt.value;
         break;
+      case TIMESTAMP:
+        recordInfo.elementOffset = 0;
+        recordInfo.elementSize = 4;
+        if(TimestampWritable.hasDecimal(bytes[offset])) {
+          recordInfo.elementSize += (byte) WritableUtils.decodeVIntSize(bytes[offset+4]);
+        }
+        break;
       default: {
         throw new RuntimeException("Unrecognized primitive type: "
             + primitiveCategory);
@@ -302,10 +312,14 @@ public final class LazyBinaryUtils {
    * @param l
    *          the long
    */
-  public static void writeVLong(Output byteStream, long l) {
+  /**
+   * Writes a zero-compressed encoded long at the start of the byte array.
+   *
+   * @param bytes destination buffer (must have room for up to 9 bytes)
+   * @param l the long to encode
+   * @return the number of bytes written
+   */
+  public static int writeVLongToByteArray(byte[] bytes, long l) {
+    return LazyBinaryUtils.writeVLongToByteArray(bytes, 0, l);
+  }
+
+  public static int writeVLongToByteArray(byte[] bytes, int offset, long l) {
     if (l >= -112 && l <= 127) {
-      byteStream.write((byte) l);
-      return;
+      bytes[offset] = (byte) l;
+      return 1;
     }
 
     int len = -112;
@@ -320,15 +334,23 @@ public final class LazyBinaryUtils {
       len--;
     }
 
-    byteStream.write((byte) len);
+    bytes[offset] = (byte) len;
 
     len = (len < -120) ? -(len + 120) : -(len + 112);
 
     for (int idx = len; idx != 0; idx--) {
       int shiftbits = (idx - 1) * 8;
       long mask = 0xFFL << shiftbits;
-      byteStream.write((byte) ((l & mask) >> shiftbits));
+      bytes[offset+1-(idx - len)] = (byte) ((l & mask) >> shiftbits);
     }
+    return 1 + len;
+  }
+
+  /**
+   * Writes a zero-compressed encoded long to the byte stream.
+   *
+   * @param byteStream the output stream
+   * @param l the long to encode
+   */
+  public static void writeVLong(Output byteStream, long l) {
+    // Use a small per-call buffer: a shared mutable static buffer would
+    // not be safe when multiple threads serialize concurrently.
+    byte[] vLongBytes = new byte[9];
+    int len = LazyBinaryUtils.writeVLongToByteArray(vLongBytes, l);
+    byteStream.write(vLongBytes, 0, len);
   }
 
   static HashMap<TypeInfo, ObjectInspector> cachedLazyBinaryObjectInspector = new HashMap<TypeInfo, ObjectInspector>();

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java Fri Aug 12 19:21:36 2011
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
 
 /**
@@ -107,6 +108,10 @@ public final class ObjectInspectorConver
           return new PrimitiveObjectInspectorConverter.StringConverter(
               (PrimitiveObjectInspector) inputOI);
         }
+      case TIMESTAMP:
+        return new PrimitiveObjectInspectorConverter.TimestampConverter(
+            (PrimitiveObjectInspector) inputOI,
+            (SettableTimestampObjectInspector) outputOI);
       default:
         throw new RuntimeException("Hive internal error: conversion of "
             + inputOI.getTypeName() + " to " + outputOI.getTypeName()

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Fri Aug 12 19:21:36 2011
@@ -30,6 +30,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
@@ -41,6 +42,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
@@ -440,6 +442,10 @@ public final class ObjectInspectorUtils 
         }
         return r;
       }
+      case TIMESTAMP:
+        TimestampWritable t = ((TimestampObjectInspector) poi)
+            .getPrimitiveWritableObject(o);
+        return t.hashCode();
       default: {
         throw new RuntimeException("Unknown type: "
             + poi.getPrimitiveCategory());
@@ -592,6 +598,13 @@ public final class ObjectInspectorUtils 
               .compareTo(s2));
         }
       }
+      case TIMESTAMP: {
+        TimestampWritable t1 = ((TimestampObjectInspector) poi1)
+            .getPrimitiveWritableObject(o1);
+        TimestampWritable t2 = ((TimestampObjectInspector) poi2)
+            .getPrimitiveWritableObject(o2);
+        return t1.compareTo(t2);
+      }
       default: {
         throw new RuntimeException("Unknown type: "
             + poi1.getPrimitiveCategory());

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -27,7 +27,7 @@ public interface PrimitiveObjectInspecto
    * The primitive types supported by Hive.
    */
   public static enum PrimitiveCategory {
-    VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, UNKNOWN
+    VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, TIMESTAMP, UNKNOWN
   };
 
   /**
@@ -61,7 +61,7 @@ public interface PrimitiveObjectInspecto
   /**
    * Get a copy of the Object in the same class, so the return value can be
    * stored independently of the parameter.
-   * 
+   *
    * If the Object is a Primitive Java Object, we just return the parameter
    * since Primitive Java Object is immutable.
    */

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+/**
+ * ObjectInspector for timestamps represented as java.sql.Timestamp
+ * Java objects, with settable/creatable support.
+ */
+public class JavaTimestampObjectInspector
+    extends AbstractPrimitiveJavaObjectInspector
+    implements SettableTimestampObjectInspector {
+
+  protected JavaTimestampObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+  }
+
+  public TimestampWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : new TimestampWritable((Timestamp) o);
+  }
+
+  @Override
+  public Timestamp getPrimitiveJavaObject(Object o) {
+    return o == null ? null : (Timestamp) o;
+  }
+
+  public Timestamp get(Object o) {
+    return (Timestamp) o;
+  }
+
+  public Object set(Object o, Timestamp value) {
+    Timestamp t = (Timestamp) o;
+    t.setTime(value.getTime());
+    // setTime() truncates the nanos field to millisecond precision,
+    // so restore the full nanosecond value afterwards.
+    t.setNanos(value.getNanos());
+    return t;
+  }
+
+  public Object set(Object o, byte[] bytes, int offset) {
+    TimestampWritable.setTimestamp((Timestamp) o, bytes, offset);
+    return o;
+  }
+
+  public Object set(Object o, TimestampWritable tw) {
+    Timestamp t = (Timestamp) o;
+    t.setTime(tw.getTimestamp().getTime());
+    t.setNanos(tw.getTimestamp().getNanos());
+    return t;
+  }
+
+  public Object create(Timestamp value) {
+    Timestamp t = new Timestamp(value.getTime());
+    // new Timestamp(millis) only carries millisecond precision; copy the
+    // full nanos field so sub-millisecond digits are not lost.
+    t.setNanos(value.getNanos());
+    return t;
+  }
+
+  public Object create(byte[] bytes, int offset) {
+    return TimestampWritable.createTimestamp(bytes, offset);
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java Fri Aug 12 19:21:36 2011
@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -234,13 +236,34 @@ public class PrimitiveObjectInspectorCon
     }
   }
 
+  /**
+   * A helper class to convert any primitive to a Timestamp object held by a
+   * SettableTimestampObjectInspector.
+   */
+  public static class TimestampConverter implements Converter {
+    PrimitiveObjectInspector inputOI;
+    SettableTimestampObjectInspector outputOI;
+    // Reusable output object: convert() mutates it in place on every call,
+    // so callers must not retain a returned reference across calls.
+    Object r;
+
+    public TimestampConverter(PrimitiveObjectInspector inputOI,
+        SettableTimestampObjectInspector outputOI) {
+      this.inputOI = inputOI;
+      this.outputOI = outputOI;
+      r = outputOI.create(new Timestamp(0));
+    }
+
+    public Object convert(Object input) {
+      if (input == null) {
+        return null;
+      }
+      // Delegates the actual type coercion to
+      // PrimitiveObjectInspectorUtils.getTimestamp, then stores the result
+      // into the reusable output object.
+      return outputOI.set(r, PrimitiveObjectInspectorUtils.getTimestamp(input,
+          inputOI));
+    }
+  }
+
   /**
    * A helper class to convert any primitive to Text.
    */
   public static class TextConverter implements Converter {
-    private PrimitiveObjectInspector inputOI;
-    private Text t = new Text();
-    private ByteStream.Output out = new ByteStream.Output();
+    private final PrimitiveObjectInspector inputOI;
+    private final Text t = new Text();
+    private final ByteStream.Output out = new ByteStream.Output();
 
     private static byte[] trueBytes = {'T', 'R', 'U', 'E'};
     private static byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
@@ -291,6 +314,10 @@ public class PrimitiveObjectInspectorCon
       case STRING:
         t.set(((StringObjectInspector) inputOI).getPrimitiveJavaObject(input));
         return t;
+      case TIMESTAMP:
+        t.set(((TimestampObjectInspector) inputOI)
+            .getPrimitiveWritableObject(input).toString());
+        return t;
       default:
         throw new RuntimeException("Hive 2 Internal error: type = " + inputOI.getTypeName());
       }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java Fri Aug 12 19:21:36 2011
@@ -28,7 +28,7 @@ import org.apache.hadoop.io.Writable;
 /**
  * PrimitiveObjectInspectorFactory is the primary way to create new
  * PrimitiveObjectInspector instances.
- * 
+ *
  * The reason of having caches here is that ObjectInspector is because
  * ObjectInspectors do not have an internal state - so ObjectInspectors with the
  * same construction parameters should result in exactly the same
@@ -54,6 +54,8 @@ public final class PrimitiveObjectInspec
       new JavaStringObjectInspector();
   public static final JavaVoidObjectInspector javaVoidObjectInspector =
       new JavaVoidObjectInspector();
+  public static final JavaTimestampObjectInspector javaTimestampObjectInspector =
+      new JavaTimestampObjectInspector();
 
   public static final WritableBooleanObjectInspector writableBooleanObjectInspector =
       new WritableBooleanObjectInspector();
@@ -73,6 +75,8 @@ public final class PrimitiveObjectInspec
       new WritableStringObjectInspector();
   public static final WritableVoidObjectInspector writableVoidObjectInspector =
       new WritableVoidObjectInspector();
+  public static final WritableTimestampObjectInspector writableTimestampObjectInspector =
+      new WritableTimestampObjectInspector();
 
   private static HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector> cachedPrimitiveWritableInspectorCache =
       new HashMap<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector>();
@@ -95,6 +99,8 @@ public final class PrimitiveObjectInspec
         writableStringObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VOID,
         writableVoidObjectInspector);
+    cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.TIMESTAMP,
+        writableTimestampObjectInspector);
   }
 
   private static HashMap<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector> cachedPrimitiveJavaInspectorCache =
@@ -118,11 +124,13 @@ public final class PrimitiveObjectInspec
         javaStringObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VOID,
         javaVoidObjectInspector);
+    cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.TIMESTAMP,
+        javaTimestampObjectInspector);
   }
 
   /**
    * Returns the PrimitiveWritableObjectInspector for the PrimitiveCategory.
-   * 
+   *
    * @param primitiveCategory
    */
   public static AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjectInspector(
@@ -138,7 +146,7 @@ public final class PrimitiveObjectInspec
 
   /**
    * Returns the PrimitiveJavaObjectInspector for the PrimitiveCategory.
-   * 
+   *
    * @param primitiveCategory
    */
   public static AbstractPrimitiveJavaObjectInspector getPrimitiveJavaObjectInspector(
@@ -155,7 +163,7 @@ public final class PrimitiveObjectInspec
   /**
    * Returns an ObjectInspector for a primitive Class. The Class can be a Hive
    * Writable class, or a Java Primitive Class.
-   * 
+   *
    * A runtimeException will be thrown if the class is not recognized as a
    * primitive type by Hive.
    */

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java Fri Aug 12 19:21:36 2011
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.serde2.ob
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -28,6 +29,7 @@ import org.apache.hadoop.hive.serde.Cons
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -44,7 +46,7 @@ import org.apache.hadoop.io.WritableUtil
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
  */
@@ -167,6 +169,9 @@ public final class PrimitiveObjectInspec
   public static final PrimitiveTypeEntry shortTypeEntry = new PrimitiveTypeEntry(
       PrimitiveCategory.SHORT, Constants.SMALLINT_TYPE_NAME, Short.TYPE,
       Short.class, ShortWritable.class);
+  public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
+      PrimitiveCategory.TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME, null,
+      Object.class, TimestampWritable.class);
 
   // The following is a complex type for special handling
   public static final PrimitiveTypeEntry unknownTypeEntry = new PrimitiveTypeEntry(
@@ -182,6 +187,7 @@ public final class PrimitiveObjectInspec
     registerType(doubleTypeEntry);
     registerType(byteTypeEntry);
     registerType(shortTypeEntry);
+    registerType(timestampTypeEntry);
     registerType(unknownTypeEntry);
   }
 
@@ -341,6 +347,10 @@ public final class PrimitiveObjectInspec
           .getPrimitiveWritableObject(o2);
       return t1.equals(t2);
     }
+    case TIMESTAMP: {
+      return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1)
+          .equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2));
+    }
     default:
       return false;
     }
@@ -367,6 +377,9 @@ public final class PrimitiveObjectInspec
       return ((DoubleObjectInspector) oi).get(o);
     case STRING:
       return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
+    case TIMESTAMP:
+      return ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
+          .getDouble();
     default:
       throw new NumberFormatException();
     }
@@ -437,6 +450,10 @@ public final class PrimitiveObjectInspec
         result = s.length() != 0;
       }
       break;
+    case TIMESTAMP:
+      result = (((TimestampObjectInspector) oi)
+          .getPrimitiveWritableObject(o).getSeconds() != 0);
+      break;
     default:
       throw new RuntimeException("Hive 2 Internal error: unknown type: "
           + oi.getTypeName());
@@ -513,6 +530,10 @@ public final class PrimitiveObjectInspec
       }
       break;
     }
+    case TIMESTAMP:
+      result = (int) (((TimestampObjectInspector) oi)
+          .getPrimitiveWritableObject(o).getSeconds());
+      break;
     default: {
       throw new RuntimeException("Hive 2 Internal error: unknown type: "
           + oi.getTypeName());
@@ -563,6 +584,10 @@ public final class PrimitiveObjectInspec
         result = Long.parseLong(s);
       }
       break;
+    case TIMESTAMP:
+      result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
+          .getSeconds();
+      break;
     default:
       throw new RuntimeException("Hive 2 Internal error: unknown type: "
           + oi.getTypeName());
@@ -607,6 +632,9 @@ public final class PrimitiveObjectInspec
       String s = soi.getPrimitiveJavaObject(o);
       result = Double.parseDouble(s);
       break;
+    case TIMESTAMP:
+      result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getDouble();
+      break;
     default:
       throw new RuntimeException("Hive 2 Internal error: unknown type: "
           + oi.getTypeName());
@@ -664,6 +692,67 @@ public final class PrimitiveObjectInspec
       StringObjectInspector soi = (StringObjectInspector) oi;
       result = soi.getPrimitiveJavaObject(o);
       break;
+    case TIMESTAMP:
+      result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString();
+      break;
+    default:
+      throw new RuntimeException("Hive 2 Internal error: unknown type: "
+          + oi.getTypeName());
+    }
+    return result;
+  }
+
+  public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector oi) {
+    if (o == null) {
+      return null;
+    }
+
+    Timestamp result = null;
+    switch (oi.getPrimitiveCategory()) {
+    case VOID:
+      result = null;
+      break;
+    case BOOLEAN:
+      result = new Timestamp(((BooleanObjectInspector) oi).get(o) ? 1 : 0);
+      break;
+    case BYTE:
+      result = new Timestamp(((ByteObjectInspector) oi).get(o));
+      break;
+    case SHORT:
+      result = new Timestamp(((ShortObjectInspector) oi).get(o));
+      break;
+    case INT:
+      result = new Timestamp(((IntObjectInspector) oi).get(o));
+      break;
+    case LONG:
+      result = new Timestamp(((LongObjectInspector) oi).get(o));
+      break;
+    case FLOAT:
+      result = TimestampWritable.floatToTimestamp(((FloatObjectInspector) oi).get(o));
+      break;
+    case DOUBLE:
+      result = TimestampWritable.doubleToTimestamp(((DoubleObjectInspector) oi).get(o));
+      break;
+    case STRING:
+      StringObjectInspector soi = (StringObjectInspector) oi;
+      String s = soi.getPrimitiveJavaObject(o).trim();
+
+      // Throw away extra if more than 9 decimal places
+      int periodIdx = s.indexOf(".");
+      if (periodIdx != -1) {
+        if (s.length() - periodIdx > 9) {
+          s = s.substring(0, periodIdx + 10);
+        }
+      }
+      try {
+        result = Timestamp.valueOf(s);
+      } catch (IllegalArgumentException e) {
+        result = null;
+      }
+      break;
+    case TIMESTAMP:
+      result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getTimestamp();
+      break;
     default:
       throw new RuntimeException("Hive 2 Internal error: unknown type: "
           + oi.getTypeName());

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+
+/**
+ * A SettableTimestampObjectInspector can set a Timestamp value to an object.
+ */
+public interface SettableTimestampObjectInspector extends TimestampObjectInspector {
+
+  // Set o from the binary timestamp representation starting at bytes[offset].
+  Object set(Object o, byte[] bytes, int offset);
+
+  // Set o from a java.sql.Timestamp value.
+  Object set(Object o, Timestamp t);
+
+  // Set o from a TimestampWritable value.
+  Object set(Object o, TimestampWritable t);
+
+  // Create a new timestamp object from the binary representation at bytes[offset].
+  Object create(byte[] bytes, int offset);
+
+  // Create a new timestamp object from a java.sql.Timestamp value.
+  Object create (Timestamp t);
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+/**
+ * A TimestampObjectInspector inspects an object representing a timestamp,
+ * exposing it either as a TimestampWritable or a java.sql.Timestamp.
+ */
+public interface TimestampObjectInspector extends PrimitiveObjectInspector {
+
+  TimestampWritable getPrimitiveWritableObject(Object o);
+
+  Timestamp getPrimitiveJavaObject(Object o);
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java?rev=1157222&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java Fri Aug 12 19:21:36 2011
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class WritableTimestampObjectInspector extends
+    AbstractPrimitiveWritableObjectInspector implements
+    SettableTimestampObjectInspector {
+
+  public WritableTimestampObjectInspector() {
+    super(PrimitiveObjectInspectorUtils.timestampTypeEntry);
+  }
+
+  @Override
+  public TimestampWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (TimestampWritable) o;
+  }
+
+  public Timestamp getPrimitiveJavaObject(Object o) {
+    return o == null ? null : ((TimestampWritable) o).getTimestamp();
+  }
+
+  public Object copyObject(Object o) {
+    return o == null ? null : new TimestampWritable((TimestampWritable) o);
+  }
+
+  public Object set(Object o, byte[] bytes, int offset) {
+    ((TimestampWritable) o).set(bytes, offset);
+    return o;
+  }
+
+  public Object set(Object o, Timestamp t) {
+    ((TimestampWritable) o).set(t);
+    return o;
+  }
+
+  public Object set(Object o, TimestampWritable t) {
+    ((TimestampWritable) o).set(t);
+    return o;
+  }
+
+  public Object create(byte[] bytes, int offset) {
+    return new TimestampWritable(bytes, offset);
+  }
+
+  public Object create(Timestamp t) {
+    return new TimestampWritable(t);
+  }
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java?rev=1157222&r1=1157221&r2=1157222&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java Fri Aug 12 19:21:36 2011
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.serde2.obj
 
 /**
  * TypeInfoFactory can be used to create the TypeInfo object for any types.
- * 
+ *
  * TypeInfo objects are all read-only so we can reuse them easily.
  * TypeInfoFactory has internal cache to make sure we don't create 2 TypeInfo
  * objects that represents the same type.
@@ -62,6 +62,7 @@ public final class TypeInfoFactory {
   public static final TypeInfo doubleTypeInfo = getPrimitiveTypeInfo(Constants.DOUBLE_TYPE_NAME);
   public static final TypeInfo byteTypeInfo = getPrimitiveTypeInfo(Constants.TINYINT_TYPE_NAME);
   public static final TypeInfo shortTypeInfo = getPrimitiveTypeInfo(Constants.SMALLINT_TYPE_NAME);
+  public static final TypeInfo timestampTypeInfo = getPrimitiveTypeInfo(Constants.TIMESTAMP_TYPE_NAME);
 
   public static final TypeInfo unknownTypeInfo = getPrimitiveTypeInfo("unknown");