Posted to commits@hive.apache.org by mm...@apache.org on 2016/04/19 12:12:55 UTC

[01/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Repository: hive
Updated Branches:
  refs/heads/branch-1 f42b984bd -> 130293e56


http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vector_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_udf1.q.out b/ql/src/test/results/clientpositive/vector_udf1.q.out
index bb02ea7..748276f 100644
--- a/ql/src/test/results/clientpositive/vector_udf1.q.out
+++ b/ql/src/test/results/clientpositive/vector_udf1.q.out
@@ -62,10 +62,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -126,10 +125,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -190,10 +188,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -256,8 +253,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -319,8 +316,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -382,8 +379,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -445,8 +442,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -508,8 +505,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -572,8 +569,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -636,8 +633,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -699,8 +696,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -763,8 +760,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -826,8 +823,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -890,8 +887,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -954,8 +951,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1017,8 +1014,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1080,8 +1077,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1142,8 +1139,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1201,8 +1198,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1260,8 +1257,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1321,8 +1318,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1385,8 +1382,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1461,8 +1458,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1485,7 +1482,7 @@ from varchar_udf_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@varchar_udf_1
 #### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
+{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}
 PREHOOK: query: explain
 select
   min(c2),
@@ -1511,10 +1508,10 @@ STAGE PLANS:
             Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: c2 (type: string), c4 (type: varchar(20))
-              outputColumnNames: c2, c4
+              outputColumnNames: _col0, _col1
               Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                aggregations: min(c2), min(c4)
+                aggregations: min(_col0), min(_col1)
                 mode: hash
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
@@ -1533,8 +1530,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1583,10 +1580,10 @@ STAGE PLANS:
             Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: c2 (type: string), c4 (type: varchar(20))
-              outputColumnNames: c2, c4
+              outputColumnNames: _col0, _col1
               Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                aggregations: max(c2), max(c4)
+                aggregations: max(_col0), max(_col1)
                 mode: hash
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
@@ -1605,8 +1602,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out
index 1113453..630be88 100644
--- a/ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -340,18 +340,18 @@ true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750
 true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	15.892	NULL	NULL	8.9098899E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988973E8
 true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	15.892	NULL	NULL	9.3086726E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867247E8
 true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	11	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	11.065	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	-14.0	NULL	-0.9906073556948704	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-4	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	-21.0	NULL	-0.8366556385360561	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-6	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	-24.0	NULL	0.9055783620066238	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-5	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	-21.0	NULL	-0.8366556385360561	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-7	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	-24.0	NULL	0.9055783620066238	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	12	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	12.935	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	-30.0	NULL	0.9880316240928618	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-14	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-6	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-1	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.386	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	-38.0	NULL	-0.2963685787093853	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-15	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-7	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-2	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.3860000000000001	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	-38.0	NULL	-0.2963685787093853	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	4	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	4.679	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	-5.0	NULL	0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-5	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	-50.0	NULL	0.26237485370392877	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-6	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	-50.0	NULL	0.26237485370392877	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	13	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	13.15	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	-59.0	NULL	-0.6367380071391379	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-7	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	-60.0	NULL	0.3048106211022167	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-8	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	-60.0	NULL	0.3048106211022167	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	3	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	3.136	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	-8.0	NULL	-0.9893582466233818	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-14	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	20.0	NULL	0.9129452507276277	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-9	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	48.0	NULL	-0.7682546613236668	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-15	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	20.0	NULL	0.9129452507276277	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-10	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	48.0	NULL	-0.7682546613236668	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	10	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	10.973	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	5.0	NULL	-0.9589242746631385	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	10	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	10.956	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	59.0	NULL	0.6367380071391379	NULL
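
The integer columns that changed in the rows above (for example -4 becoming -5 beside the fractional value -4.1, and -6 becoming -7 beside -6.855) reflect the corrected cast of a timestamp's fractional seconds: negative values now round toward negative infinity instead of truncating toward zero, matching millisToSeconds in TimestampWritable. The widened -1.3860000000000001 in the same rows presumably comes from the new double computation (seconds + nanos / 1e9) picking up one ulp of error. A minimal standalone sketch of the rounding difference, not part of the patch:

    public class CastSemanticsSketch {
      public static void main(String[] args) {
        long millis = -4100;  // a timestamp of -4.1 seconds relative to the epoch
        long truncated = millis / 1000;                      // -4: the old, wrong result
        long floored = millis >= 0 ? millis / 1000
                                   : (millis - 999) / 1000;  // -5: the new result
        System.out.println(truncated + " vs " + floored);
      }
    }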

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
new file mode 100644
index 0000000..c04428e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
@@ -0,0 +1,239 @@
+PREHOOK: query: DROP TABLE IF EXISTS test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS test
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test
+POSTHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test
+PREHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test
+POSTHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test
+POSTHOOK: Lineage: test.ts EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(_col0), max(_col0)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(_col0), max(_col0)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
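
The interval result above, 3652060 23:59:59.999999999, is the hybrid Julian/Gregorian day count that java.sql.Timestamp arithmetic produces between year 1 and year 9999 (two days more than the proleptic-Gregorian count, because pre-1582 dates are Julian). A small sketch, not part of the patch, that reproduces it, assuming the JVM timezone is pinned to UTC so parsing adds no historical local-time offset shifts:

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class IntervalSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));  // avoid historical offsets
        Timestamp min = Timestamp.valueOf("0001-01-01 00:00:00.000000000");
        Timestamp max = Timestamp.valueOf("9999-12-31 23:59:59.999999999");
        long seconds = max.getTime() / 1000 - min.getTime() / 1000;  // whole seconds apart
        long days = seconds / 86400;
        long rem = seconds % 86400;
        System.out.printf("%d %02d:%02d:%02d.%09d%n",
            days, rem / 3600, (rem % 3600) / 60, rem % 60, max.getNanos() - min.getNanos());
        // prints: 3652060 23:59:59.999999999, matching the query result above
      }
    }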

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 3779f1a..5a7feb5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -349,19 +349,6 @@ public class BinarySortableSerializeWrite implements SerializeWrite {
     BinarySortableSerDe.serializeHiveIntervalDayTime(output, vidt, invert);
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-    final boolean invert = columnSortOrderIsDesc[++index];
-
-    // This field is not a null.
-    BinarySortableSerDe.writeByte(output, (byte) 1, invert);
-
-    long totalSecs = DateUtils.getIntervalDayTimeTotalSecondsFromTotalNanos(totalNanos);
-    int nanos = DateUtils.getIntervalDayTimeNanosFromTotalNanos(totalNanos);
-    BinarySortableSerDe.serializeLong(output, totalSecs, invert);
-    BinarySortableSerDe.serializeInt(output, nanos, invert);
-  }
-
   /*
    * DECIMAL.
    */
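
For context on the serializeLong/serializeHiveIntervalDayTime calls above: BinarySortableSerDe encodes values so that raw byte-wise comparison matches SQL ordering, flipping the sign bit of integers and complementing every byte of a DESC-ordered column (the invert flag). A rough standalone sketch of that idea; the helper here is written for illustration and is not the serde's actual method:

    import java.io.ByteArrayOutputStream;

    public class SortableLongSketch {
      // Write v big-endian with the sign bit flipped so that unsigned byte
      // comparison of the output matches signed comparison of the input;
      // complement each byte when the column sorts descending.
      static void serializeLong(ByteArrayOutputStream out, long v, boolean invert) {
        v ^= Long.MIN_VALUE;  // flip sign bit: negatives now compare below positives
        for (int shift = 56; shift >= 0; shift -= 8) {
          byte b = (byte) (v >>> shift);
          out.write(invert ? ~b : b);
        }
      }

      public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializeLong(out, -1L, false);
        serializeLong(out, 1L, false);
        // the first 8 bytes now compare below the second 8 as unsigned bytes
      }
    }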

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
index e6fb8b6..0c70fda 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
@@ -145,9 +145,6 @@ public interface SerializeWrite {
    */
   void writeHiveIntervalDayTime(HiveIntervalDayTime vidt) throws IOException;
 
-  // We provide a faster way to write a hive interval day time without a HiveIntervalDayTime object.
-  void writeHiveIntervalDayTime(long totalNanos) throws IOException;
-
   /*
    * DECIMAL.
    */
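
The removed overload took the interval as one signed nanosecond total and split it into whole seconds plus a nanosecond remainder via DateUtils before serializing; after this change callers pass a HiveIntervalDayTime object instead. A sketch of the decomposition the deleted code performed; the exact negative-value convention in DateUtils is assumed here to be plain truncating division:

    public class TotalNanosSketch {
      public static void main(String[] args) {
        long totalNanos = -1_500_000_000L;                  // -1.5 s
        long totalSecs = totalNanos / 1_000_000_000L;       // -1
        int nanos = (int) (totalNanos % 1_000_000_000L);    // -500_000_000
        System.out.println(totalSecs + " s, " + nanos + " ns");
      }
    }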

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index a2a6c79..f0201d8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -25,6 +25,7 @@ import java.math.BigDecimal;
 import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Date;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
@@ -150,6 +151,21 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     }
   }
 
+  public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
+    ((Date) timestamp).setTime(secondsAsMillis);
+    timestamp.setNanos(nanos);
+  }
+
+  public void setInternal(long secondsAsMillis, int nanos) {
+
+    // This is our way of documenting that we are MUTATING the contents of
+    // this writable's internal timestamp.
+    updateTimestamp(timestamp, secondsAsMillis, nanos);
+
+    bytesEmpty = true;
+    timestampEmpty = false;
+  }
+
   private void clearTimestamp() {
     timestampEmpty = true;
   }
@@ -306,7 +322,20 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return seconds + nanos / 1000000000;
   }
 
+  public static long getLong(Timestamp timestamp) {
+    return timestamp.getTime() / 1000;
+  }
 
+  /**
+   *
+   * @return double representation of the timestamp, accurate to nanoseconds
+   */
+  public static double getDouble(Timestamp timestamp) {
+    double seconds, nanos;
+    seconds = millisToSeconds(timestamp.getTime());
+    nanos = timestamp.getNanos();
+    return seconds + nanos / 1000000000;
+  }
 
   public void readFields(DataInput in) throws IOException {
     in.readFully(internalBytes, 0, 4);
@@ -526,6 +555,21 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return t;
   }
 
+  public HiveDecimal getHiveDecimal() {
+    if (timestampEmpty) {
+      populateTimestamp();
+    }
+    return getHiveDecimal(timestamp);
+  }
+
+  public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
+    // The BigDecimal class recommends not converting directly from double to BigDecimal,
+    // so we convert through a string...
+    Double timestampDouble = TimestampWritable.getDouble(timestamp);
+    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
+    return result;
+  }
+
   /**
    * Converts the time in seconds or milliseconds to a timestamp.
    * @param time time in seconds or in milliseconds
@@ -536,6 +580,17 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
       return new Timestamp(intToTimestampInSeconds ?  time * 1000 : time);
   }
 
+  /**
+   * Sets the given timestamp from the time in seconds or milliseconds.
+   * @param timestamp the timestamp to mutate
+   * @param time time in seconds or in milliseconds
+   */
+  public static void setTimestampFromLong(Timestamp timestamp, long time,
+      boolean intToTimestampInSeconds) {
+      // If the time is in seconds, converts it to milliseconds first.
+    timestamp.setTime(intToTimestampInSeconds ?  time * 1000 : time);
+  }
+
   public static Timestamp doubleToTimestamp(double f) {
     long seconds = (long) f;
 
@@ -559,6 +614,37 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return t;
   }
 
+  public static void setTimestampFromDouble(Timestamp timestamp, double f) {
+    // Otherwise, BigDecimal throws an exception.  (Support vector operations that sometimes
+    // do work on double Not-a-Number NaN values).
+    if (Double.isNaN(f)) {
+      timestamp.setTime(0);
+      return;
+    }
+    // Algorithm used by TimestampWritable.doubleToTimestamp method.
+    // Allocates a BigDecimal object!
+
+    long seconds = (long) f;
+
+    // We must ensure the exactness of the double's fractional portion.
+    // 0.6 as the fraction part will be converted to 0.59999... and
+    // significantly reduce the savings from binary serialization
+    BigDecimal bd = new BigDecimal(String.valueOf(f));
+    bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
+    int nanos = bd.intValue();
+
+    // Convert to millis
+    long millis = seconds * 1000;
+    if (nanos < 0) {
+      millis -= 1000;
+      nanos += 1000000000;
+    }
+    timestamp.setTime(millis);
+
+    // Set remaining fractional portion to nanos
+    timestamp.setNanos(nanos);
+  }
+
   public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
     boolean hasDecimalOrSecondVInt = hasDecimalOrSecondVInt(bytes[offset]);
     long seconds = (long) TimestampWritable.getSeconds(bytes, offset);
@@ -656,7 +742,7 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
    * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of
    * seconds. 500 would round to 0, -500 would round to -1.
    */
-  static long millisToSeconds(long millis) {
+  public static long millisToSeconds(long millis) {
     if (millis >= 0) {
       return millis / 1000;
     } else {
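
The new setTimestampFromDouble above goes through BigDecimal (via a String) to keep the double's fractional part exact, and borrows one second when the nanosecond remainder of a negative value goes negative; millisToSeconds likewise rounds negative millisecond counts down rather than toward zero. A standalone sketch mirroring the added code:

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    public class FractionSketch {
      public static void main(String[] args) {
        double f = -1.5;
        long seconds = (long) f;                           // -1 (truncates toward zero)
        BigDecimal bd = new BigDecimal(String.valueOf(f))  // String keeps 0.5 exact
            .subtract(new BigDecimal(seconds))
            .multiply(new BigDecimal(1000000000));
        int nanos = bd.intValue();                         // -500_000_000
        long millis = seconds * 1000;
        if (nanos < 0) {          // borrow a second so nanos lands in [0, 1e9)
          millis -= 1000;
          nanos += 1000000000;
        }
        Timestamp ts = new Timestamp(0);
        ts.setTime(millis);       // -2000
        ts.setNanos(nanos);       // 500_000_000 -> overall -1.5 s
        System.out.println(ts.getTime());                  // -1500

        // millisToSeconds rounds down for negatives: -500 ms -> -1 s
        long m = -500;
        System.out.println(m >= 0 ? m / 1000 : (m - 999) / 1000);  // -1
      }
    }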

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index 46f37eb..0f6c6a6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -473,26 +473,6 @@ public class LazySimpleSerializeWrite implements SerializeWrite {
     index++;
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-
-    if (index > 0) {
-      output.write(separator);
-    }
-
-    if (hiveIntervalDayTime == null) {
-      hiveIntervalDayTime = new HiveIntervalDayTime();
-    }
-    if (hiveIntervalDayTimeWritable == null) {
-      hiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
-    }
-    DateUtils.setIntervalDayTimeTotalNanos(hiveIntervalDayTime, totalNanos);
-    hiveIntervalDayTimeWritable.set(hiveIntervalDayTime);
-    LazyHiveIntervalDayTime.writeUTF8(output, hiveIntervalDayTimeWritable);
-
-    index++;
-  }
-
   /*
    * DECIMAL.
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index 2d201ec..56134d7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -673,42 +673,6 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
     }
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-
-    // Every 8 fields we write a NULL byte.
-    if ((fieldIndex % 8) == 0) {
-      if (fieldIndex > 0) {
-        // Write back previous 8 field's NULL byte.
-        output.writeByte(nullOffset, nullByte);
-        nullByte = 0;
-        nullOffset = output.getLength();
-      }
-      // Allocate next NULL byte.
-      output.reserve(1);
-    }
-
-    // Set bit in NULL byte when a field is NOT NULL.
-    nullByte |= 1 << (fieldIndex % 8);
-
-    if (hiveIntervalDayTime == null) {
-      hiveIntervalDayTime = new HiveIntervalDayTime();
-    }
-    if (hiveIntervalDayTimeWritable == null) {
-      hiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
-    }
-    DateUtils.setIntervalDayTimeTotalNanos(hiveIntervalDayTime, totalNanos);
-    hiveIntervalDayTimeWritable.set(hiveIntervalDayTime);
-    hiveIntervalDayTimeWritable.writeToByteStream(output);
-
-    fieldIndex++;
-
-    if (fieldIndex == fieldCount) {
-      // Write back the final NULL byte before the last fields.
-      output.writeByte(nullOffset, nullByte);
-    }
-  }
-
   /*
    * DECIMAL.
    */
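
The removed method also shows LazyBinary's null bookkeeping: each run of 8 fields is preceded by one byte whose bit i is set when field (8 * group + i) is NOT null, written back once the group (or the row) is complete. A standalone sketch of the bitmap itself:

    public class NullByteSketch {
      // One byte per group of 8 fields; bit (i % 8) set means field i is NOT null.
      static byte[] nullBytes(boolean[] isNull) {
        byte[] groups = new byte[(isNull.length + 7) / 8];
        for (int fieldIndex = 0; fieldIndex < isNull.length; fieldIndex++) {
          if (!isNull[fieldIndex]) {
            groups[fieldIndex / 8] |= 1 << (fieldIndex % 8);
          }
        }
        return groups;
      }

      public static void main(String[] args) {
        byte[] b = nullBytes(new boolean[] {false, true, false});  // fields 0, 2 non-null
        System.out.println(Integer.toBinaryString(b[0] & 0xff));   // prints 101
      }
    }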


[03/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/tez/vector_join_part_col_char.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_join_part_col_char.q.out b/ql/src/test/results/clientpositive/tez/vector_join_part_col_char.q.out
index f3d5931..f861103 100644
--- a/ql/src/test/results/clientpositive/tez/vector_join_part_col_char.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_join_part_col_char.q.out
@@ -87,111 +87,137 @@ PREHOOK: Input: default@char_tbl1
 POSTHOOK: query: show partitions char_tbl1
 POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@char_tbl1
-gpa=2.5
-gpa=3.5
+gpa=2.5                                               
+gpa=3.5                                               
 PREHOOK: query: show partitions char_tbl2
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@char_tbl2
 POSTHOOK: query: show partitions char_tbl2
 POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@char_tbl2
-gpa=3
-gpa=3.5
+gpa=3    
+gpa=3.5  
 PREHOOK: query: explain select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 POSTHOOK: type: QUERY
-Plan optimized by CBO.
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
-Vertex dependency in root stage
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: c1
+                  Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: name (type: string), age (type: int), gpa (type: char(50))
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col2 (type: char(50))
+                      sort order: +
+                      Map-reduce partition columns: _col2 (type: char(50))
+                      Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: string), _col1 (type: int)
+                    Select Operator
+                      expressions: _col2 (type: char(50))
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: char(50))
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                        Dynamic Partitioning Event Operator
+                          Target Input: c2
+                          Partition key expr: gpa
+                          Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+                          Target column: gpa
+                          Target Vertex: Map 3
+            Execution mode: vectorized
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: c2
+                  Statistics: Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: name (type: string), age (type: int), gpa (type: char(5))
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col2 (type: char(50))
+                      sort order: +
+                      Map-reduce partition columns: _col2 (type: char(50))
+                      Statistics: Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: string), _col1 (type: int), _col2 (type: char(5))
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col2 (type: char(50))
+                  1 _col2 (type: char(50))
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
-Stage-0
-   Fetch Operator
-      limit:-1
-      Stage-1
-         Reducer 2
-         File Output Operator [FS_10]
-            compressed:false
-            Statistics:Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE
-            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
-            Merge Join Operator [MERGEJOIN_21]
-            |  condition map:[{"":"Inner Join 0 to 1"}]
-            |  keys:{"0":"_col2 (type: char(50))","1":"_col2 (type: char(50))"}
-            |  outputColumnNames:["_col0","_col1","_col2","_col3","_col4","_col5"]
-            |  Statistics:Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE
-            |<-Map 1 [SIMPLE_EDGE] vectorized
-            |  Reduce Output Operator [RS_23]
-            |     key expressions:_col2 (type: char(50))
-            |     Map-reduce partition columns:_col2 (type: char(50))
-            |     sort order:+
-            |     Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |     value expressions:_col0 (type: string), _col1 (type: int)
-            |     Select Operator [OP_22]
-            |        outputColumnNames:["_col0","_col1","_col2"]
-            |        Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |        TableScan [TS_0]
-            |           alias:c1
-            |           Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |  Dynamic Partitioning Event Operator [EVENT_20]
-            |     Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |     Group By Operator [OP_25]
-            |        keys:_col0 (type: char(50))
-            |        outputColumnNames:["_col0"]
-            |        Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |        Select Operator [OP_24]
-            |           outputColumnNames:["_col0"]
-            |           Statistics:Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: NONE
-            |            Please refer to the previous Select Operator [OP_22]
-            |<-Map 3 [SIMPLE_EDGE] vectorized
-               Reduce Output Operator [RS_27]
-                  key expressions:_col2 (type: char(50))
-                  Map-reduce partition columns:_col2 (type: char(50))
-                  sort order:+
-                  Statistics:Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
-                  value expressions:_col0 (type: string), _col1 (type: int), _col2 (type: char(5))
-                  Select Operator [OP_26]
-                     outputColumnNames:["_col0","_col1","_col2"]
-                     Statistics:Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
-                     TableScan [TS_2]
-                        alias:c2
-                        Statistics:Num rows: 2 Data size: 203 Basic stats: COMPLETE Column stats: NONE
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
 
 PREHOOK: query: select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@char_tbl1
-PREHOOK: Input: default@char_tbl1@gpa=2.5
-PREHOOK: Input: default@char_tbl1@gpa=3.5
+PREHOOK: Input: default@char_tbl1@gpa=2.5                                               
+PREHOOK: Input: default@char_tbl1@gpa=3.5                                               
 PREHOOK: Input: default@char_tbl2
-PREHOOK: Input: default@char_tbl2@gpa=3
-PREHOOK: Input: default@char_tbl2@gpa=3.5
+PREHOOK: Input: default@char_tbl2@gpa=3    
+PREHOOK: Input: default@char_tbl2@gpa=3.5  
 #### A masked pattern was here ####
 POSTHOOK: query: select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@char_tbl1
-POSTHOOK: Input: default@char_tbl1@gpa=2.5
-POSTHOOK: Input: default@char_tbl1@gpa=3.5
+POSTHOOK: Input: default@char_tbl1@gpa=2.5                                               
+POSTHOOK: Input: default@char_tbl1@gpa=3.5                                               
 POSTHOOK: Input: default@char_tbl2
-POSTHOOK: Input: default@char_tbl2@gpa=3
-POSTHOOK: Input: default@char_tbl2@gpa=3.5
+POSTHOOK: Input: default@char_tbl2@gpa=3    
+POSTHOOK: Input: default@char_tbl2@gpa=3.5  
 #### A masked pattern was here ####
 alice underhill	46	3.5                                               	alice underhill	46	3.5  
 PREHOOK: query: select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@char_tbl1
-PREHOOK: Input: default@char_tbl1@gpa=2.5
-PREHOOK: Input: default@char_tbl1@gpa=3.5
+PREHOOK: Input: default@char_tbl1@gpa=2.5                                               
+PREHOOK: Input: default@char_tbl1@gpa=3.5                                               
 PREHOOK: Input: default@char_tbl2
-PREHOOK: Input: default@char_tbl2@gpa=3
-PREHOOK: Input: default@char_tbl2@gpa=3.5
+PREHOOK: Input: default@char_tbl2@gpa=3    
+PREHOOK: Input: default@char_tbl2@gpa=3.5  
 #### A masked pattern was here ####
 POSTHOOK: query: select c1.name, c1.age, c1.gpa, c2.name, c2.age, c2.gpa from char_tbl1 c1 join char_tbl2 c2 on (c1.gpa = c2.gpa)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@char_tbl1
-POSTHOOK: Input: default@char_tbl1@gpa=2.5
-POSTHOOK: Input: default@char_tbl1@gpa=3.5
+POSTHOOK: Input: default@char_tbl1@gpa=2.5                                               
+POSTHOOK: Input: default@char_tbl1@gpa=3.5                                               
 POSTHOOK: Input: default@char_tbl2
-POSTHOOK: Input: default@char_tbl2@gpa=3
-POSTHOOK: Input: default@char_tbl2@gpa=3.5
+POSTHOOK: Input: default@char_tbl2@gpa=3    
+POSTHOOK: Input: default@char_tbl2@gpa=3.5  
 #### A masked pattern was here ####
 alice underhill	46	3.5                                               	alice underhill	46	3.5  

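The expected-output changes in this file all come down to char(N) semantics: char values are padded with trailing spaces up to their declared length, so the partition values now print padded to char(50) for char_tbl1 and char(5) for char_tbl2 (hence "gpa=3.5" followed by trailing blanks). A stand-alone Java sketch of that padding rule (the helper name is illustrative):

public class CharPadSketch {
  // A char(N) value is displayed blank-padded to its declared length N.
  static String padToCharLength(String value, int maxLength) {
    StringBuilder sb = new StringBuilder(value);
    while (sb.length() < maxLength) {
      sb.append(' ');
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println("[" + padToCharLength("3.5", 5) + "]"); // prints [3.5  ]
  }
}
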
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/tez/vectorized_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/tez/vectorized_timestamp.q.out
new file mode 100644
index 0000000..d25032a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/vectorized_timestamp.q.out
@@ -0,0 +1,258 @@
+PREHOOK: query: DROP TABLE IF EXISTS test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS test
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test
+POSTHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test
+PREHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test
+POSTHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test
+POSTHOOK: Lineage: test.ts EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test
+                  Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ts (type: timestamp)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test
+                  Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ts (type: timestamp)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: min(_col0), max(_col0)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0), max(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test
+                  Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ts (type: timestamp)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test
+                  Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: ts (type: timestamp)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: min(_col0), max(_col0)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0), max(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999


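The last result line above, 3652060 23:59:59.999999999, is MAX(ts) - MIN(ts) printed as an interval_day_time: whole days, a time-of-day remainder, and nine fractional digits. A self-contained sketch of that decomposition from total seconds plus nanoseconds (the formatting helper is illustrative, not Hive's actual printer):

public class IntervalFormatSketch {
  // Split total seconds into days / hours / minutes / seconds, append nanos.
  static String format(long totalSeconds, int nanos) {
    long days = totalSeconds / 86400;
    long rem = totalSeconds % 86400;
    return String.format("%d %02d:%02d:%02d.%09d",
        days, rem / 3600, (rem % 3600) / 60, rem % 60, nanos);
  }

  public static void main(String[] args) {
    // 3652060 whole days plus 23:59:59, with the maximum nanosecond field.
    System.out.println(format(3652060L * 86400 + 86399, 999999999));
    // prints 3652060 23:59:59.999999999
  }
}
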
[07/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
index 428ced7..9c7a83f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
@@ -34,32 +34,56 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
   DoubleColEqualLongScalar.class, DoubleColEqualDoubleScalar.class,
   LongScalarEqualLongColumn.class, LongScalarEqualDoubleColumn.class,
   DoubleScalarEqualLongColumn.class, DoubleScalarEqualDoubleColumn.class,
+
   StringGroupColEqualStringGroupColumn.class, FilterStringGroupColEqualStringGroupColumn.class,
   StringGroupColEqualStringScalar.class,
   StringGroupColEqualVarCharScalar.class, StringGroupColEqualCharScalar.class,
   StringScalarEqualStringGroupColumn.class,
   VarCharScalarEqualStringGroupColumn.class, CharScalarEqualStringGroupColumn.class,
+
   FilterStringGroupColEqualStringScalar.class, FilterStringScalarEqualStringGroupColumn.class,
   FilterStringGroupColEqualVarCharScalar.class, FilterVarCharScalarEqualStringGroupColumn.class,
   FilterStringGroupColEqualCharScalar.class, FilterCharScalarEqualStringGroupColumn.class,
+
   FilterLongColEqualLongColumn.class, FilterLongColEqualDoubleColumn.class,
   FilterDoubleColEqualLongColumn.class, FilterDoubleColEqualDoubleColumn.class,
   FilterLongColEqualLongScalar.class, FilterLongColEqualDoubleScalar.class,
   FilterDoubleColEqualLongScalar.class, FilterDoubleColEqualDoubleScalar.class,
   FilterLongScalarEqualLongColumn.class, FilterLongScalarEqualDoubleColumn.class,
   FilterDoubleScalarEqualLongColumn.class, FilterDoubleScalarEqualDoubleColumn.class,
+
   FilterDecimalColEqualDecimalColumn.class, FilterDecimalColEqualDecimalScalar.class,
   FilterDecimalScalarEqualDecimalColumn.class,
+
+  TimestampColEqualTimestampColumn.class,
   TimestampColEqualTimestampScalar.class, TimestampScalarEqualTimestampColumn.class,
+  TimestampColEqualLongColumn.class,
+  TimestampColEqualLongScalar.class, TimestampScalarEqualLongColumn.class,
+  TimestampColEqualDoubleColumn.class,
+  TimestampColEqualDoubleScalar.class, TimestampScalarEqualDoubleColumn.class,
+  LongColEqualTimestampColumn.class,
+  LongColEqualTimestampScalar.class, LongScalarEqualTimestampColumn.class,
+  DoubleColEqualTimestampColumn.class,
+  DoubleColEqualTimestampScalar.class, DoubleScalarEqualTimestampColumn.class,
+
+  FilterTimestampColEqualTimestampColumn.class,
   FilterTimestampColEqualTimestampScalar.class, FilterTimestampScalarEqualTimestampColumn.class,
-  TimestampColEqualLongScalar.class, LongScalarEqualTimestampColumn.class,
-  FilterTimestampColEqualLongScalar.class, FilterLongScalarEqualTimestampColumn.class,
-  TimestampColEqualDoubleScalar.class, DoubleScalarEqualTimestampColumn.class,
-  FilterTimestampColEqualDoubleScalar.class, FilterDoubleScalarEqualTimestampColumn.class,
+  FilterTimestampColEqualLongColumn.class,
+  FilterTimestampColEqualLongScalar.class, FilterTimestampScalarEqualLongColumn.class,
+  FilterTimestampColEqualDoubleColumn.class,
+  FilterTimestampColEqualDoubleScalar.class, FilterTimestampScalarEqualDoubleColumn.class,
+  FilterLongColEqualTimestampColumn.class,
+  FilterLongColEqualTimestampScalar.class, FilterLongScalarEqualTimestampColumn.class,
+  FilterDoubleColEqualTimestampColumn.class,
+  FilterDoubleColEqualTimestampScalar.class, FilterDoubleScalarEqualTimestampColumn.class,
+
   IntervalYearMonthScalarEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn.class,
   IntervalYearMonthColEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColEqualIntervalYearMonthScalar.class,
+
+  IntervalDayTimeColEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeColEqualIntervalDayTimeColumn.class,
   IntervalDayTimeScalarEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarEqualIntervalDayTimeColumn.class,
   IntervalDayTimeColEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColEqualIntervalDayTimeScalar.class,
+
   DateColEqualDateScalar.class,FilterDateColEqualDateScalar.class,
   DateScalarEqualDateColumn.class,FilterDateScalarEqualDateColumn.class,
   })

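This hunk, like the parallel ones for >=, <=, >, < and != below, only grows the class list inside the UDF's @VectorizedExpressions annotation; that annotation is how the query planner discovers which vectorized implementations exist for a given UDF, so listing the new timestamp/long/double column and scalar variants here is what makes them visible to it. A minimal reflection sketch of reading such a list, assuming the Hive ql jar is on the classpath and that the annotation is runtime-retained (it is read reflectively during planning):

import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;

public class AnnotationScanSketch {
  public static void main(String[] args) throws Exception {
    Class<?> udf = Class.forName(
        "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual");
    VectorizedExpressions ann = udf.getAnnotation(VectorizedExpressions.class);
    if (ann != null) {
      for (Class<?> expr : ann.value()) {
        // e.g. TimestampColEqualTimestampColumn, FilterTimestampColEqual...
        System.out.println(expr.getSimpleName());
      }
    }
  }
}
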
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
index d9556cc..e461736 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
@@ -35,32 +35,56 @@ import org.apache.hadoop.io.Text;
   DoubleColGreaterEqualLongScalar.class, DoubleColGreaterEqualDoubleScalar.class,
   LongScalarGreaterEqualLongColumn.class, LongScalarGreaterEqualDoubleColumn.class,
   DoubleScalarGreaterEqualLongColumn.class, DoubleScalarGreaterEqualDoubleColumn.class,
+
   StringGroupColGreaterEqualStringGroupColumn.class, FilterStringGroupColGreaterEqualStringGroupColumn.class,
   StringGroupColGreaterEqualStringScalar.class,
   StringGroupColGreaterEqualVarCharScalar.class, StringGroupColGreaterEqualCharScalar.class,
   StringScalarGreaterEqualStringGroupColumn.class,
   VarCharScalarGreaterEqualStringGroupColumn.class, CharScalarGreaterEqualStringGroupColumn.class,
+
   FilterStringGroupColGreaterEqualStringScalar.class, FilterStringScalarGreaterEqualStringGroupColumn.class,
   FilterStringGroupColGreaterEqualVarCharScalar.class, FilterVarCharScalarGreaterEqualStringGroupColumn.class,
   FilterStringGroupColGreaterEqualCharScalar.class, FilterCharScalarGreaterEqualStringGroupColumn.class,
+
   FilterLongColGreaterEqualLongColumn.class, FilterLongColGreaterEqualDoubleColumn.class,
   FilterDoubleColGreaterEqualLongColumn.class, FilterDoubleColGreaterEqualDoubleColumn.class,
   FilterLongColGreaterEqualLongScalar.class, FilterLongColGreaterEqualDoubleScalar.class,
   FilterDoubleColGreaterEqualLongScalar.class, FilterDoubleColGreaterEqualDoubleScalar.class,
   FilterLongScalarGreaterEqualLongColumn.class, FilterLongScalarGreaterEqualDoubleColumn.class,
   FilterDoubleScalarGreaterEqualLongColumn.class, FilterDoubleScalarGreaterEqualDoubleColumn.class,
+
   FilterDecimalColGreaterEqualDecimalColumn.class, FilterDecimalColGreaterEqualDecimalScalar.class,
   FilterDecimalScalarGreaterEqualDecimalColumn.class,
+
+  TimestampColGreaterEqualTimestampColumn.class,
   TimestampColGreaterEqualTimestampScalar.class, TimestampScalarGreaterEqualTimestampColumn.class,
+  TimestampColGreaterEqualLongColumn.class,
+  TimestampColGreaterEqualLongScalar.class, TimestampScalarGreaterEqualLongColumn.class,
+  TimestampColGreaterEqualDoubleColumn.class,
+  TimestampColGreaterEqualDoubleScalar.class, TimestampScalarGreaterEqualDoubleColumn.class,
+  LongColGreaterEqualTimestampColumn.class,
+  LongColGreaterEqualTimestampScalar.class, LongScalarGreaterEqualTimestampColumn.class,
+  DoubleColGreaterEqualTimestampColumn.class,
+  DoubleColGreaterEqualTimestampScalar.class, DoubleScalarGreaterEqualTimestampColumn.class,
+
+  FilterTimestampColGreaterEqualTimestampColumn.class,
   FilterTimestampColGreaterEqualTimestampScalar.class, FilterTimestampScalarGreaterEqualTimestampColumn.class,
-  TimestampColGreaterEqualLongScalar.class, LongScalarGreaterEqualTimestampColumn.class,
-  FilterTimestampColGreaterEqualLongScalar.class, FilterLongScalarGreaterEqualTimestampColumn.class,
-  TimestampColGreaterEqualDoubleScalar.class, DoubleScalarGreaterEqualTimestampColumn.class,
-  FilterTimestampColGreaterEqualDoubleScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class,
+  FilterTimestampColGreaterEqualLongColumn.class,
+  FilterTimestampColGreaterEqualLongScalar.class, FilterTimestampScalarGreaterEqualLongColumn.class,
+  FilterTimestampColGreaterEqualDoubleColumn.class,
+  FilterTimestampColGreaterEqualDoubleScalar.class, FilterTimestampScalarGreaterEqualDoubleColumn.class,
+  FilterLongColGreaterEqualTimestampColumn.class,
+  FilterLongColGreaterEqualTimestampScalar.class, FilterLongScalarGreaterEqualTimestampColumn.class,
+  FilterDoubleColGreaterEqualTimestampColumn.class,
+  FilterDoubleColGreaterEqualTimestampScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class,
+
   IntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class,
   IntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class,
+
+  IntervalDayTimeColGreaterEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeColGreaterEqualIntervalDayTimeColumn.class,
   IntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class,
   IntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class,
+
   DateColGreaterEqualDateScalar.class,FilterDateColGreaterEqualDateScalar.class,
   DateScalarGreaterEqualDateColumn.class,FilterDateScalarGreaterEqualDateColumn.class,
   })

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
index 1d9eaf6..1bd127e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
@@ -35,14 +35,17 @@ import org.apache.hadoop.io.Text;
   DoubleColLessEqualLongScalar.class, DoubleColLessEqualDoubleScalar.class,
   LongScalarLessEqualLongColumn.class, LongScalarLessEqualDoubleColumn.class,
   DoubleScalarLessEqualLongColumn.class, DoubleScalarLessEqualDoubleColumn.class,
+
   StringGroupColLessEqualStringGroupColumn.class, FilterStringGroupColLessEqualStringGroupColumn.class,
   StringGroupColLessEqualStringScalar.class,
   StringGroupColLessEqualVarCharScalar.class, StringGroupColLessEqualCharScalar.class,
   StringScalarLessEqualStringGroupColumn.class,
   VarCharScalarLessEqualStringGroupColumn.class, CharScalarLessEqualStringGroupColumn.class,
+
   FilterStringGroupColLessEqualStringScalar.class, FilterStringScalarLessEqualStringGroupColumn.class,
   FilterStringGroupColLessEqualVarCharScalar.class, FilterVarCharScalarLessEqualStringGroupColumn.class,
   FilterStringGroupColLessEqualCharScalar.class, FilterCharScalarLessEqualStringGroupColumn.class,
+
   FilterLongColLessEqualLongColumn.class, FilterLongColLessEqualDoubleColumn.class,
   FilterDoubleColLessEqualLongColumn.class, FilterDoubleColLessEqualDoubleColumn.class,
   FilterLongColLessEqualLongScalar.class, FilterLongColLessEqualDoubleScalar.class,
@@ -51,16 +54,36 @@ import org.apache.hadoop.io.Text;
   FilterDoubleScalarLessEqualLongColumn.class, FilterDoubleScalarLessEqualDoubleColumn.class,
   FilterDecimalColLessEqualDecimalColumn.class, FilterDecimalColLessEqualDecimalScalar.class,
   FilterDecimalScalarLessEqualDecimalColumn.class,
+
+  TimestampColLessEqualTimestampColumn.class,
   TimestampColLessEqualTimestampScalar.class, TimestampScalarLessEqualTimestampColumn.class,
+  TimestampColLessEqualLongColumn.class,
+  TimestampColLessEqualLongScalar.class, TimestampScalarLessEqualLongColumn.class,
+  TimestampColLessEqualDoubleColumn.class,
+  TimestampColLessEqualDoubleScalar.class, TimestampScalarLessEqualDoubleColumn.class,
+  LongColLessEqualTimestampColumn.class,
+  LongColLessEqualTimestampScalar.class, LongScalarLessEqualTimestampColumn.class,
+  DoubleColLessEqualTimestampColumn.class,
+  DoubleColLessEqualTimestampScalar.class, DoubleScalarLessEqualTimestampColumn.class,
+
+  FilterTimestampColLessEqualTimestampColumn.class,
   FilterTimestampColLessEqualTimestampScalar.class, FilterTimestampScalarLessEqualTimestampColumn.class,
-  TimestampColLessEqualLongScalar.class, LongScalarLessEqualTimestampColumn.class,
-  FilterTimestampColLessEqualLongScalar.class, FilterLongScalarLessEqualTimestampColumn.class,
-  TimestampColLessEqualDoubleScalar.class, DoubleScalarLessEqualTimestampColumn.class,
-  FilterTimestampColLessEqualDoubleScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class,
+  FilterTimestampColLessEqualLongColumn.class,
+  FilterTimestampColLessEqualLongScalar.class, FilterTimestampScalarLessEqualLongColumn.class,
+  FilterTimestampColLessEqualDoubleColumn.class,
+  FilterTimestampColLessEqualDoubleScalar.class, FilterTimestampScalarLessEqualDoubleColumn.class,
+  FilterLongColLessEqualTimestampColumn.class,
+  FilterLongColLessEqualTimestampScalar.class, FilterLongScalarLessEqualTimestampColumn.class,
+  FilterDoubleColLessEqualTimestampColumn.class,
+  FilterDoubleColLessEqualTimestampScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class,
+
   IntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class,
   IntervalYearMonthColLessEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessEqualIntervalYearMonthScalar.class,
+
+  IntervalDayTimeColLessEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeColLessEqualIntervalDayTimeColumn.class,
   IntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class,
   IntervalDayTimeColLessEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessEqualIntervalDayTimeScalar.class,
+
   DateColLessEqualDateScalar.class,FilterDateColLessEqualDateScalar.class,
   DateScalarLessEqualDateColumn.class,FilterDateScalarLessEqualDateColumn.class,
   })

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
index 8e1f2b1..c266a0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
@@ -35,14 +35,17 @@ import org.apache.hadoop.io.Text;
   DoubleColGreaterLongScalar.class, DoubleColGreaterDoubleScalar.class,
   LongScalarGreaterLongColumn.class, LongScalarGreaterDoubleColumn.class,
   DoubleScalarGreaterLongColumn.class, DoubleScalarGreaterDoubleColumn.class,
+
   StringGroupColGreaterStringGroupColumn.class, FilterStringGroupColGreaterStringGroupColumn.class,
   StringGroupColGreaterStringScalar.class,
   StringGroupColGreaterVarCharScalar.class, StringGroupColGreaterCharScalar.class,
   StringScalarGreaterStringGroupColumn.class,
   VarCharScalarGreaterStringGroupColumn.class, CharScalarGreaterStringGroupColumn.class,
+
   FilterStringGroupColGreaterStringScalar.class, FilterStringScalarGreaterStringGroupColumn.class,
   FilterStringGroupColGreaterVarCharScalar.class, FilterVarCharScalarGreaterStringGroupColumn.class,
   FilterStringGroupColGreaterCharScalar.class, FilterCharScalarGreaterStringGroupColumn.class,
+
   FilterLongColGreaterLongColumn.class, FilterLongColGreaterDoubleColumn.class,
   FilterDoubleColGreaterLongColumn.class, FilterDoubleColGreaterDoubleColumn.class,
   FilterLongColGreaterLongScalar.class, FilterLongColGreaterDoubleScalar.class,
@@ -51,16 +54,36 @@ import org.apache.hadoop.io.Text;
   FilterDoubleScalarGreaterLongColumn.class, FilterDoubleScalarGreaterDoubleColumn.class,
   FilterDecimalColGreaterDecimalColumn.class, FilterDecimalColGreaterDecimalScalar.class,
   FilterDecimalScalarGreaterDecimalColumn.class,
+
+  TimestampColGreaterTimestampColumn.class,
   TimestampColGreaterTimestampScalar.class, TimestampScalarGreaterTimestampColumn.class,
+  TimestampColGreaterLongColumn.class,
+  TimestampColGreaterLongScalar.class, TimestampScalarGreaterLongColumn.class,
+  TimestampColGreaterDoubleColumn.class,
+  TimestampColGreaterDoubleScalar.class, TimestampScalarGreaterDoubleColumn.class,
+  LongColGreaterTimestampColumn.class,
+  LongColGreaterTimestampScalar.class, LongScalarGreaterTimestampColumn.class,
+  DoubleColGreaterTimestampColumn.class,
+  DoubleColGreaterTimestampScalar.class, DoubleScalarGreaterTimestampColumn.class,
+
+  FilterTimestampColGreaterTimestampColumn.class,
   FilterTimestampColGreaterTimestampScalar.class, FilterTimestampScalarGreaterTimestampColumn.class,
-  TimestampColGreaterLongScalar.class, LongScalarGreaterTimestampColumn.class,
-  FilterTimestampColGreaterLongScalar.class, FilterLongScalarGreaterTimestampColumn.class,
-  TimestampColGreaterDoubleScalar.class, DoubleScalarGreaterTimestampColumn.class,
-  FilterTimestampColGreaterDoubleScalar.class, FilterDoubleScalarGreaterTimestampColumn.class,
+  FilterTimestampColGreaterLongColumn.class,
+  FilterTimestampColGreaterLongScalar.class, FilterTimestampScalarGreaterLongColumn.class,
+  FilterTimestampColGreaterDoubleColumn.class,
+  FilterTimestampColGreaterDoubleScalar.class, FilterTimestampScalarGreaterDoubleColumn.class,
+  FilterLongColGreaterTimestampColumn.class,
+  FilterLongColGreaterTimestampScalar.class, FilterLongScalarGreaterTimestampColumn.class,
+  FilterDoubleColGreaterTimestampColumn.class,
+  FilterDoubleColGreaterTimestampScalar.class, FilterDoubleScalarGreaterTimestampColumn.class,
+
   IntervalYearMonthScalarGreaterIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterIntervalYearMonthColumn.class,
   IntervalYearMonthColGreaterIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterIntervalYearMonthScalar.class,
+
+  IntervalDayTimeColGreaterIntervalDayTimeColumn.class, FilterIntervalDayTimeColGreaterIntervalDayTimeColumn.class,
   IntervalDayTimeScalarGreaterIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterIntervalDayTimeColumn.class,
   IntervalDayTimeColGreaterIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterIntervalDayTimeScalar.class,
+
   DateColGreaterDateScalar.class,FilterDateColGreaterDateScalar.class,
   DateScalarGreaterDateColumn.class,FilterDateScalarGreaterDateColumn.class,
   })

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
index 101b348..bcdc46e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.io.Text;
     DoubleColLessLongScalar.class, DoubleColLessDoubleScalar.class,
     LongScalarLessLongColumn.class, LongScalarLessDoubleColumn.class,
     DoubleScalarLessLongColumn.class, DoubleScalarLessDoubleColumn.class,
+
     StringGroupColLessStringGroupColumn.class, FilterStringGroupColLessStringGroupColumn.class,
     StringGroupColLessStringScalar.class,
     StringGroupColLessVarCharScalar.class, StringGroupColLessCharScalar.class,
@@ -43,24 +44,46 @@ import org.apache.hadoop.io.Text;
     FilterStringGroupColLessStringScalar.class, FilterStringScalarLessStringGroupColumn.class,
     FilterStringGroupColLessVarCharScalar.class, FilterVarCharScalarLessStringGroupColumn.class,
     FilterStringGroupColLessCharScalar.class, FilterCharScalarLessStringGroupColumn.class,
+
     FilterLongColLessLongColumn.class, FilterLongColLessDoubleColumn.class,
     FilterDoubleColLessLongColumn.class, FilterDoubleColLessDoubleColumn.class,
     FilterLongColLessLongScalar.class, FilterLongColLessDoubleScalar.class,
     FilterDoubleColLessLongScalar.class, FilterDoubleColLessDoubleScalar.class,
     FilterLongScalarLessLongColumn.class, FilterLongScalarLessDoubleColumn.class,
     FilterDoubleScalarLessLongColumn.class, FilterDoubleScalarLessDoubleColumn.class,
+
     FilterDecimalColLessDecimalColumn.class, FilterDecimalColLessDecimalScalar.class,
     FilterDecimalScalarLessDecimalColumn.class,
+
+    TimestampColLessTimestampColumn.class,
     TimestampColLessTimestampScalar.class, TimestampScalarLessTimestampColumn.class,
+    TimestampColLessLongColumn.class,
+    TimestampColLessLongScalar.class, TimestampScalarLessLongColumn.class,
+    TimestampColLessDoubleColumn.class,
+    TimestampColLessDoubleScalar.class, TimestampScalarLessDoubleColumn.class,
+    LongColLessTimestampColumn.class,
+    LongColLessTimestampScalar.class, LongScalarLessTimestampColumn.class,
+    DoubleColLessTimestampColumn.class,
+    DoubleColLessTimestampScalar.class, DoubleScalarLessTimestampColumn.class,
+
+    FilterTimestampColLessTimestampColumn.class,
     FilterTimestampColLessTimestampScalar.class, FilterTimestampScalarLessTimestampColumn.class,
-    TimestampColLessLongScalar.class, LongScalarLessTimestampColumn.class,
-    FilterTimestampColLessLongScalar.class, FilterLongScalarLessTimestampColumn.class,
-    TimestampColLessDoubleScalar.class, DoubleScalarLessTimestampColumn.class,
-    FilterTimestampColLessDoubleScalar.class, FilterDoubleScalarLessTimestampColumn.class,
+    FilterTimestampColLessLongColumn.class,
+    FilterTimestampColLessLongScalar.class, FilterTimestampScalarLessLongColumn.class,
+    FilterTimestampColLessDoubleColumn.class,
+    FilterTimestampColLessDoubleScalar.class, FilterTimestampScalarLessDoubleColumn.class,
+    FilterLongColLessTimestampColumn.class,
+    FilterLongColLessTimestampScalar.class, FilterLongScalarLessTimestampColumn.class,
+    FilterDoubleColLessTimestampColumn.class,
+    FilterDoubleColLessTimestampScalar.class, FilterDoubleScalarLessTimestampColumn.class,
+
     IntervalYearMonthScalarLessIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessIntervalYearMonthColumn.class,
     IntervalYearMonthColLessIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessIntervalYearMonthScalar.class,
+
+    IntervalDayTimeColLessIntervalDayTimeColumn.class, FilterIntervalDayTimeColLessIntervalDayTimeColumn.class,
     IntervalDayTimeScalarLessIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessIntervalDayTimeColumn.class,
     IntervalDayTimeColLessIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessIntervalDayTimeScalar.class,
+
     DateColLessDateScalar.class,FilterDateColLessDateScalar.class,
     DateScalarLessDateColumn.class,FilterDateScalarLessDateColumn.class,
     })

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
index d6a0c58..31d8bf5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DateColSubtractDateColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DateColSubtractDateScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DateScalarSubtractDateColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
index b5da57a..43f2f44 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
@@ -34,32 +34,56 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
   DoubleColNotEqualLongScalar.class, DoubleColNotEqualDoubleScalar.class,
   LongScalarNotEqualLongColumn.class, LongScalarNotEqualDoubleColumn.class,
   DoubleScalarNotEqualLongColumn.class, DoubleScalarNotEqualDoubleColumn.class,
+
   StringGroupColNotEqualStringGroupColumn.class, FilterStringGroupColNotEqualStringGroupColumn.class,
   StringGroupColNotEqualStringScalar.class,
   StringGroupColNotEqualVarCharScalar.class, StringGroupColNotEqualCharScalar.class,
   StringScalarNotEqualStringGroupColumn.class,
-  VarCharScalarNotEqualStringGroupColumn.class, CharScalarNotEqualStringGroupColumn.class, 
+  VarCharScalarNotEqualStringGroupColumn.class, CharScalarNotEqualStringGroupColumn.class,
+
   FilterStringGroupColNotEqualStringScalar.class, FilterStringScalarNotEqualStringGroupColumn.class,
   FilterStringGroupColNotEqualVarCharScalar.class, FilterVarCharScalarNotEqualStringGroupColumn.class,
   FilterStringGroupColNotEqualCharScalar.class, FilterCharScalarNotEqualStringGroupColumn.class,
+
   FilterLongColNotEqualLongColumn.class, FilterLongColNotEqualDoubleColumn.class,
   FilterDoubleColNotEqualLongColumn.class, FilterDoubleColNotEqualDoubleColumn.class,
   FilterLongColNotEqualLongScalar.class, FilterLongColNotEqualDoubleScalar.class,
   FilterDoubleColNotEqualLongScalar.class, FilterDoubleColNotEqualDoubleScalar.class,
   FilterLongScalarNotEqualLongColumn.class, FilterLongScalarNotEqualDoubleColumn.class,
   FilterDoubleScalarNotEqualLongColumn.class, FilterDoubleScalarNotEqualDoubleColumn.class,
+
   FilterDecimalColNotEqualDecimalColumn.class, FilterDecimalColNotEqualDecimalScalar.class,
   FilterDecimalScalarNotEqualDecimalColumn.class,
+
+  TimestampColNotEqualTimestampColumn.class,
   TimestampColNotEqualTimestampScalar.class, TimestampScalarNotEqualTimestampColumn.class,
+  TimestampColNotEqualLongColumn.class,
+  TimestampColNotEqualLongScalar.class, TimestampScalarNotEqualLongColumn.class,
+  TimestampColNotEqualDoubleColumn.class,
+  TimestampColNotEqualDoubleScalar.class, TimestampScalarNotEqualDoubleColumn.class,
+  LongColNotEqualTimestampColumn.class,
+  LongColNotEqualTimestampScalar.class, LongScalarNotEqualTimestampColumn.class,
+  DoubleColNotEqualTimestampColumn.class,
+  DoubleColNotEqualTimestampScalar.class, DoubleScalarNotEqualTimestampColumn.class,
+
+  FilterTimestampColNotEqualTimestampColumn.class,
   FilterTimestampColNotEqualTimestampScalar.class, FilterTimestampScalarNotEqualTimestampColumn.class,
-  TimestampColNotEqualLongScalar.class, LongScalarNotEqualTimestampColumn.class,
-  FilterTimestampColNotEqualLongScalar.class, FilterLongScalarNotEqualTimestampColumn.class,
-  TimestampColNotEqualDoubleScalar.class, DoubleScalarNotEqualTimestampColumn.class,
-  FilterTimestampColNotEqualDoubleScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class,
+  FilterTimestampColNotEqualLongColumn.class,
+  FilterTimestampColNotEqualLongScalar.class, FilterTimestampScalarNotEqualLongColumn.class,
+  FilterTimestampColNotEqualDoubleColumn.class,
+  FilterTimestampColNotEqualDoubleScalar.class, FilterTimestampScalarNotEqualDoubleColumn.class,
+  FilterLongColNotEqualTimestampColumn.class,
+  FilterLongColNotEqualTimestampScalar.class, FilterLongScalarNotEqualTimestampColumn.class,
+  FilterDoubleColNotEqualTimestampColumn.class,
+  FilterDoubleColNotEqualTimestampScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class,
+
   IntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class,
   IntervalYearMonthColNotEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColNotEqualIntervalYearMonthScalar.class,
+
+  IntervalDayTimeColNotEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeColNotEqualIntervalDayTimeColumn.class,
   IntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class,
   IntervalDayTimeColNotEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColNotEqualIntervalDayTimeScalar.class,
+
   DateColNotEqualDateScalar.class,FilterDateColNotEqualDateScalar.class,
   DateScalarNotEqualDateColumn.class,FilterDateScalarNotEqualDateColumn.class,
   })

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
index 24b49a0..b7fb892 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
@@ -25,8 +25,8 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToTimestamp;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToTimestampViaDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToTimestampViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDoubleToTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastLongToTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -47,8 +47,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
  */
 @Description(name = "timestamp",
 value = "cast(date as timestamp) - Returns timestamp")
-@VectorizedExpressions({CastLongToTimestampViaLongToLong.class,
-  CastDoubleToTimestampViaDoubleToLong.class, CastDecimalToTimestamp.class})
+@VectorizedExpressions({CastLongToTimestamp.class,
+  CastDoubleToTimestamp.class, CastDecimalToTimestamp.class})
 public class GenericUDFTimestamp extends GenericUDF {
 
   private transient PrimitiveObjectInspector argumentOI;

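The substitution above retires the *ViaLongToLong cast expressions in favor of CastLongToTimestamp and CastDoubleToTimestamp, which produce genuine timestamp values rather than long-encoded ones. As a rough illustration of the conversion direction only, here is a stand-alone sketch building a java.sql.Timestamp from a numeric seconds value; treating the input as seconds since the epoch is an assumption for illustration, since Hive's numeric-to-timestamp convention has varied across versions:

import java.sql.Timestamp;

public class CastDoubleToTimestampSketch {
  // ASSUMPTION: input is seconds since the epoch. Ignores rounding edge
  // cases immediately below a whole second.
  static Timestamp fromSeconds(double seconds) {
    Timestamp ts = new Timestamp((long) Math.floor(seconds * 1000.0));
    // Recover sub-millisecond precision into the nanos field.
    ts.setNanos((int) Math.round((seconds - Math.floor(seconds)) * 1e9));
    return ts;
  }

  public static void main(String[] args) {
    // Prints 1970-01-01 00:00:00.5 shifted into the JVM's default time zone.
    System.out.println(fromSeconds(0.5));
  }
}
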
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
index 0613005..7e8a472 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToDate.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastLongToDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToDate;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToDate;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -41,7 +42,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
     + "Example:\n "
     + "  > SELECT CAST('2009-01-01' AS DATE) FROM src LIMIT 1;\n"
     + "  '2009-01-01'")
-@VectorizedExpressions({CastStringToDate.class, CastLongToDate.class})
+@VectorizedExpressions({CastStringToDate.class, CastLongToDate.class, CastTimestampToDate.class})
 public class GenericUDFToDate extends GenericUDF {
 
   private transient PrimitiveObjectInspector argumentOI;

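Adding CastTimestampToDate to the list above gives CAST(ts AS DATE) a vectorized path. Semantically the cast just drops the time-of-day and keeps the day; a minimal sketch, assuming local-time semantics and an illustrative helper name:

import java.sql.Date;
import java.sql.Timestamp;
import java.util.Calendar;

public class CastTimestampToDateSketch {
  // Truncate a timestamp to local midnight and reinterpret it as a DATE.
  static Date toDate(Timestamp ts) {
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(ts.getTime());
    cal.set(Calendar.HOUR_OF_DAY, 0);
    cal.set(Calendar.MINUTE, 0);
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    return new Date(cal.getTimeInMillis());
  }

  public static void main(String[] args) {
    System.out.println(toDate(Timestamp.valueOf("2016-04-19 12:12:55")));
    // prints 2016-04-19
  }
}
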
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
index 65a2297..819de77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
@@ -27,8 +27,9 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -50,7 +51,7 @@ import org.apache.hadoop.io.Text;
 @Description(name = "to_unix_timestamp",
     value = "_FUNC_(date[, pattern]) - Returns the UNIX timestamp",
     extended = "Converts the specified time to number of seconds since 1970-01-01.")
-@VectorizedExpressions({VectorUDFUnixTimeStampLong.class, VectorUDFUnixTimeStampString.class})
+@VectorizedExpressions({VectorUDFUnixTimeStampDate.class, VectorUDFUnixTimeStampString.class, VectorUDFUnixTimeStampTimestamp.class})
 public class GenericUDFToUnixTimeStamp extends GenericUDF {
 
   private transient DateObjectInspector inputDateOI;
@@ -152,10 +153,14 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF {
       return retValue;
     }
     Timestamp timestamp = inputTimestampOI.getPrimitiveJavaObject(arguments[0].get());
-    retValue.set(timestamp.getTime() / 1000);
+    setValueFromTs(retValue, timestamp);
     return retValue;
   }
 
+  protected static void setValueFromTs(LongWritable value, Timestamp timestamp) {
+    value.set(timestamp.getTime() / 1000);
+  }
+
   @Override
   public String getDisplayString(String[] children) {
     StringBuilder sb = new StringBuilder(32);

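The new setValueFromTs helper above factors the row-mode conversion into a single place: truncate the timestamp's millisecond value to whole seconds. A short usage illustration, assuming hadoop-common's LongWritable is on the classpath:

import java.sql.Timestamp;
import org.apache.hadoop.io.LongWritable;

public class UnixTimestampSketch {
  // Same expression as the patched code: milliseconds / 1000, truncated.
  static void setValueFromTs(LongWritable value, Timestamp timestamp) {
    value.set(timestamp.getTime() / 1000);
  }

  public static void main(String[] args) {
    LongWritable out = new LongWritable();
    setValueFromTs(out, new Timestamp(1234567L)); // 1234.567 s after the epoch
    System.out.println(out.get()); // prints 1234
  }
}
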
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
index ff28995..98b1ded 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
@@ -102,24 +102,95 @@ public class DateTimeMath {
       return null;
     }
 
+    Timestamp tsResult = new Timestamp(0);
+    add(ts, interval, tsResult);
+
+    return tsResult;
+  }
+
+  public boolean add(Timestamp ts, HiveIntervalYearMonth interval, Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
     // Attempt to match Oracle semantics for timestamp arithmetic,
     // where timestamp arithmetic is done in UTC, then converted back to local timezone
     long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
-    Timestamp tsResult = new Timestamp(resultMillis);
-    tsResult.setNanos(ts.getNanos());
+    result.setTime(resultMillis);
+    result.setNanos(ts.getNanos());
+
+    return true;
+  }
+
+  public Timestamp add(HiveIntervalYearMonth interval, Timestamp ts) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    Timestamp tsResult = new Timestamp(0);
+    add(interval, ts, tsResult);
 
     return tsResult;
   }
 
+  public boolean add(HiveIntervalYearMonth interval, Timestamp ts, Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
+    // Attempt to match Oracle semantics for timestamp arithmetic,
+    // where timestamp arithmetic is done in UTC, then converted back to local timezone
+    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+    result.setTime(resultMillis);
+    result.setNanos(ts.getNanos());
+
+    return true;
+  }
+
   public Date add(Date dt, HiveIntervalYearMonth interval) {
     if (dt == null || interval == null) {
       return null;
     }
 
+    Date dtResult = new Date(0);
+    add(dt, interval, dtResult);
+
+    return dtResult;
+  }
+
+  public boolean add(Date dt, HiveIntervalYearMonth interval, Date result) {
+    if (dt == null || interval == null) {
+      return false;
+    }
+
+    // Since Date millis value is in local timezone representation, do date arithmetic
+    // using local timezone so the time remains at the start of the day.
+    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+    result.setTime(resultMillis);
+    return true;
+  }
+
+  public Date add(HiveIntervalYearMonth interval, Date dt) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    Date dtResult = new Date(0);
+    add(interval, dt, dtResult);
+
+    return dtResult;
+  }
+
+  public boolean add(HiveIntervalYearMonth interval, Date dt, Date result) {
+    if (dt == null || interval == null) {
+      return false;
+    }
+
     // Since Date millis value is in local timezone representation, do date arithmetic
     // using local timezone so the time remains at the start of the day.
     long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
-    return new Date(resultMillis);
+    result.setTime(resultMillis);
+    return true;
   }
 
   public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
@@ -136,14 +207,36 @@ public class DateTimeMath {
     if (left == null || right == null) {
       return null;
     }
-    return add(left, right.negate());
+
+    Timestamp tsResult = new Timestamp(0);
+    subtract(left, right, tsResult);
+
+    return tsResult;
+  }
+
+  public boolean subtract(Timestamp left, HiveIntervalYearMonth right, Timestamp result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
   }
 
   public Date subtract(Date left, HiveIntervalYearMonth right) {
     if (left == null || right == null) {
       return null;
     }
-    return add(left, right.negate());
+
+    Date dtResult = new Date(0);
+    subtract(left, right, dtResult);
+
+    return dtResult;
+  }
+
+  public boolean subtract(Date left, HiveIntervalYearMonth right, Date result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
   }
 
   public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
@@ -162,26 +255,74 @@ public class DateTimeMath {
       return null;
     }
 
+    Timestamp tsResult = new Timestamp(0);
+    add(ts, interval, tsResult);
+
+    return tsResult;
+  }
+
+  public boolean add(Timestamp ts, HiveIntervalDayTime interval,
+      Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
     nanosResult.addNanos(ts.getNanos(), interval.getNanos());
 
     long newMillis = ts.getTime()
         + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
-    Timestamp tsResult = new Timestamp(newMillis);
-    tsResult.setNanos(nanosResult.nanos);
+    result.setTime(newMillis);
+    result.setNanos(nanosResult.nanos);
+    return true;
+  }
+
+  public Timestamp add(HiveIntervalDayTime interval, Timestamp ts) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    Timestamp tsResult = new Timestamp(0);
+    add(interval, ts, tsResult);
     return tsResult;
   }
 
+  public boolean add(HiveIntervalDayTime interval, Timestamp ts,
+      Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
+    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
+    long newMillis = ts.getTime()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    result.setTime(newMillis);
+    result.setNanos(nanosResult.nanos);
+    return true;
+  }
+
   public HiveIntervalDayTime add(HiveIntervalDayTime left, HiveIntervalDayTime right) {
-    HiveIntervalDayTime result = null;
     if (left == null || right == null) {
       return null;
     }
 
+    HiveIntervalDayTime result = new HiveIntervalDayTime();
+    add(left, right, result);
+
+    return result;
+  }
+
+  public boolean add(HiveIntervalDayTime left, HiveIntervalDayTime right,
+      HiveIntervalDayTime result) {
+    if (left == null || right == null) {
+      return false;
+    }
+
     nanosResult.addNanos(left.getNanos(), right.getNanos());
 
     long totalSeconds = left.getTotalSeconds() + right.getTotalSeconds() + nanosResult.seconds;
-    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
-    return result;
+    result.set(totalSeconds, nanosResult.nanos);
+    return true;
   }
 
   public Timestamp subtract(Timestamp left, HiveIntervalDayTime right) {
@@ -191,6 +332,13 @@ public class DateTimeMath {
     return add(left, right.negate());
   }
 
+  public boolean subtract(Timestamp left, HiveIntervalDayTime right, Timestamp result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
+  }
+
   public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
     if (left == null || right == null) {
       return null;
@@ -198,17 +346,36 @@ public class DateTimeMath {
     return add(left, right.negate());
   }
 
+  public boolean subtract(HiveIntervalDayTime left, HiveIntervalDayTime right,
+      HiveIntervalDayTime result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
+  }
+
   public HiveIntervalDayTime subtract(Timestamp left, Timestamp right) {
-    HiveIntervalDayTime result = null;
     if (left == null || right == null) {
       return null;
     }
 
+    HiveIntervalDayTime result = new HiveIntervalDayTime();
+    subtract(left, right, result);
+
+    return result;
+  }
+
+  public boolean subtract(Timestamp left, Timestamp right,
+      HiveIntervalDayTime result) {
+    if (left == null || right == null) {
+      return false;
+    }
+
     nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
 
     long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
         - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
-    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
-    return result;
+    result.set(totalSeconds, nanosResult.nanos);
+    return true;
   }
 }

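The DateTimeMath hunks above pair each allocating overload with a variant that writes into a caller-supplied scratch object and reports null inputs through a boolean return. Below is a minimal sketch of the calling pattern this enables in a vectorized inner loop; the loop shape, variable names, and the (years, months) HiveIntervalYearMonth constructor are assumptions for illustration, not taken from the patch:

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
    import org.apache.hadoop.hive.ql.util.DateTimeMath;

    public class ScratchResultSketch {
      public static void main(String[] args) {
        DateTimeMath dtm = new DateTimeMath();
        HiveIntervalYearMonth interval = new HiveIntervalYearMonth(1, 2);  // 1 year, 2 months (assumed ctor)
        Timestamp scratch = new Timestamp(0);  // allocated once, reused for every row

        Timestamp[] rows = { Timestamp.valueOf("2001-01-01 00:00:00") };
        for (Timestamp ts : rows) {
          // Fills 'scratch' in place and returns false on null input,
          // so the inner loop creates no per-row Timestamp garbage.
          if (dtm.add(ts, interval, scratch)) {
            System.out.println(scratch);
          }
        }
      }
    }
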
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
index 6f5d1a0..2d4baa0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -258,7 +259,7 @@ public class RandomRowObjectSource {
     case BINARY:
       return getRandBinary(r, 1 + r.nextInt(100));
     case TIMESTAMP:
-      return getRandTimestamp(r);
+      return RandomTypeUtil.getRandTimestamp(r);
     case INTERVAL_YEAR_MONTH:
       return getRandIntervalYearMonth(r);
     case INTERVAL_DAY_TIME:
@@ -355,24 +356,6 @@ public class RandomRowObjectSource {
     return dateVal;
   }
 
-  public static Timestamp getRandTimestamp(Random r) {
-    String optionalNanos = "";
-    if (r.nextInt(2) == 1) {
-      optionalNanos = String.format(".%09d",
-          Integer.valueOf(0 + r.nextInt(DateUtils.NANOS_PER_SEC)));
-    }
-    String timestampStr = String.format("%d-%02d-%02d %02d:%02d:%02d%s",
-        Integer.valueOf(1970 + r.nextInt(200)),  // year
-        Integer.valueOf(1 + r.nextInt(12)),      // month
-        Integer.valueOf(1 + r.nextInt(28)),      // day
-        Integer.valueOf(0 + r.nextInt(24)),      // hour
-        Integer.valueOf(0 + r.nextInt(60)),      // minute
-        Integer.valueOf(0 + r.nextInt(60)),      // second
-        optionalNanos);
-    Timestamp timestampVal = Timestamp.valueOf(timestampStr);
-    return timestampVal;
-  }
-
   public static HiveIntervalYearMonth getRandIntervalYearMonth(Random r) {
     String yearMonthSignStr = r.nextInt(2) == 0 ? "" : "-";
     String intervalYearMonthStr = String.format("%s%d-%d",

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
new file mode 100644
index 0000000..6c46257
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector;
+
+import org.junit.Test;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.sql.Timestamp;
+import java.util.Date;
+import java.util.Random;
+
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+import static org.junit.Assert.*;
+
+/**
+ * Test for TimestampWritable and TimestampColumnVector
+ */
+public class TestTimestampWritableAndColumnVector {
+
+  private static int TEST_COUNT = 5000;
+
+  private static int fake = 0;
+
+  @Test
+  public void testDouble() throws Exception {
+
+    Random r = new Random(1234);
+    TimestampColumnVector timestampColVector = new TimestampColumnVector();
+    Timestamp[] randTimestamps = new Timestamp[VectorizedRowBatch.DEFAULT_SIZE];
+
+    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
+      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
+      randTimestamps[i] = randTimestamp;
+      timestampColVector.set(i, randTimestamp);
+    }
+    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
+      Timestamp retrievedTimestamp = timestampColVector.asScratchTimestamp(i);
+      Timestamp randTimestamp = randTimestamps[i];
+      if (!retrievedTimestamp.equals(randTimestamp)) {
+        assertTrue(false);
+      }
+      double randDouble = TimestampWritable.getDouble(randTimestamp);
+      double retrievedDouble = timestampColVector.getDouble(i);
+      if (randDouble != retrievedDouble) {
+        assertTrue(false);
+      }
+    }
+  }
+}

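For context on the getDouble() assertion above: the test assumes the double view of a timestamp is whole seconds since the epoch plus the nanoseconds as a fraction. A tiny sketch of that seconds.fraction convention; this illustrates the assumed representation and is not TimestampWritable's actual source:

    import java.sql.Timestamp;

    public class TimestampAsDoubleSketch {
      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1500L);  // 1.5s after the epoch
        // seconds.fraction view: whole seconds plus nanos scaled into [0, 1)
        // (sketch holds for post-epoch values only)
        double d = ts.getTime() / 1000 + ts.getNanos() / 1e9;
        System.out.println(d);  // prints 1.5
      }
    }
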
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index 8470c47..6abec86 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -48,6 +48,10 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumn
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
@@ -63,16 +67,18 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLTrim;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringLower;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringUpper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampDate;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFUnixTimeStampTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterStringColumnInList;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterLongColumnInList;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterDoubleColumnInList;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
@@ -151,10 +157,10 @@ public class TestVectorizationContext {
 
   @Test
   public void testVectorExpressionDescriptor() {
-    VectorUDFUnixTimeStampLong v1 = new VectorUDFUnixTimeStampLong();
+    VectorUDFUnixTimeStampDate v1 = new VectorUDFUnixTimeStampDate();
     VectorExpressionDescriptor.Builder builder1 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d1 = builder1.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     assertTrue(d1.matches(v1.getDescriptor()));
 
@@ -1012,7 +1018,7 @@ public class TestVectorizationContext {
     Assert.assertEquals(BRoundWithNumDigitsDoubleToDouble.class, ve.getClass());
     Assert.assertEquals(4, ((BRoundWithNumDigitsDoubleToDouble) ve).getDecimalPlaces().get());
 
-    // Log with int base
+    // Logger with int base
     gudfBridge = new GenericUDFBridge("log", false, UDFLog.class.getName());
     mathFuncExpr.setGenericUDF(gudfBridge);
     children2.clear();
@@ -1023,7 +1029,7 @@ public class TestVectorizationContext {
     Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass());
     Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble) ve).getBase());
 
-    // Log with default base
+    // Logger with default base
     children2.clear();
     children2.add(colDesc2);
     mathFuncExpr.setChildren(children2);
@@ -1086,14 +1092,14 @@ public class TestVectorizationContext {
     tsFuncExpr.setGenericUDF(gudfBridge);
     tsFuncExpr.setChildren(children);
     VectorExpression ve = vc.getVectorExpression(tsFuncExpr);
-    Assert.assertEquals(VectorUDFYearLong.class, ve.getClass());
+    Assert.assertEquals(VectorUDFYearTimestamp.class, ve.getClass());
 
     //GenericUDFToUnixTimeStamp
     GenericUDFToUnixTimeStamp gudf = new GenericUDFToUnixTimeStamp();
     tsFuncExpr.setGenericUDF(gudf);
     tsFuncExpr.setTypeInfo(TypeInfoFactory.longTypeInfo);
     ve = vc.getVectorExpression(tsFuncExpr);
-    Assert.assertEquals(VectorUDFUnixTimeStampLong.class, ve.getClass());
+    Assert.assertEquals(VectorUDFUnixTimeStampTimestamp.class, ve.getClass());
   }
 
   @Test
@@ -1353,7 +1359,7 @@ public class TestVectorizationContext {
     children1.set(1, col2Expr);
     children1.set(2, col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprLongColumnLongColumn);
+    assertTrue(ve instanceof IfExprTimestampColumnColumn);
 
     // timestamp column/scalar IF where scalar is really a CAST of a constant to timestamp.
     ExprNodeGenericFuncDesc f = new ExprNodeGenericFuncDesc();
@@ -1368,20 +1374,20 @@ public class TestVectorizationContext {
     // We check for two different classes below because initially the result
     // is IfExprLongColumnLongColumn but in the future if the system is enhanced
     // with constant folding then the result will be IfExprLongColumnLongScalar.
-    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
-               || IfExprLongColumnLongScalar.class == ve.getClass());
+    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass()
+               || IfExprTimestampColumnScalar.class == ve.getClass());
 
     // timestamp scalar/scalar
     children1.set(1, f);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
-        || IfExprLongScalarLongScalar.class == ve.getClass());
+    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass()
+        || IfExprTimestampScalarScalar.class == ve.getClass());
 
     // timestamp scalar/column
     children1.set(2, col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(IfExprLongColumnLongColumn.class == ve.getClass()
-        || IfExprLongScalarLongColumn.class == ve.getClass());
+    assertTrue(IfExprTimestampColumnColumn.class == ve.getClass()
+        || IfExprTimestampScalarColumn.class == ve.getClass());
 
     // test for boolean type
     col2Expr = new  ExprNodeColumnDesc(Boolean.class, "col2", "table", false);

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
index c2bf85a..7b07293 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
@@ -44,9 +44,9 @@ public class TestConstantVectorExpression {
     String str = "alpha";
     ConstantVectorExpression bytesCve = new ConstantVectorExpression(2, str.getBytes());
     HiveDecimal decVal = HiveDecimal.create("25.8");
-    ConstantVectorExpression decimalCve = new ConstantVectorExpression(3, decVal);
+    ConstantVectorExpression decimalCve = new ConstantVectorExpression(3, decVal, "decimal");
     ConstantVectorExpression nullCve = new ConstantVectorExpression(4, "string", true);
-    
+
     int size = 20;
     VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(size, 5, 0);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
index 6bd4be1..e4e417e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
@@ -100,7 +100,7 @@ public class TestVectorDateExpressions {
 
   private void verifyUDFYear(VectorizedRowBatch batch) {
     VectorExpression udf = null;
-    udf = new VectorUDFYearLong(0, 1);
+    udf = new VectorUDFYearDate(0, 1);
     udf.setInputTypes(VectorExpression.Type.DATE);
     udf.evaluate(batch);
     final int in = 0;
@@ -162,7 +162,7 @@ public class TestVectorDateExpressions {
 
   private void verifyUDFDayOfMonth(VectorizedRowBatch batch) {
     VectorExpression udf = null;
-    udf = new VectorUDFDayOfMonthLong(0, 1);
+    udf = new VectorUDFDayOfMonthDate(0, 1);
     udf.setInputTypes(VectorExpression.Type.DATE);
     udf.evaluate(batch);
     final int in = 0;
@@ -224,7 +224,7 @@ public class TestVectorDateExpressions {
 
   private void verifyUDFMonth(VectorizedRowBatch batch) {
     VectorExpression udf;
-      udf = new VectorUDFMonthLong(0, 1);
+      udf = new VectorUDFMonthDate(0, 1);
     udf.setInputTypes(VectorExpression.Type.DATE);
     udf.evaluate(batch);
     final int in = 0;
@@ -300,7 +300,7 @@ public class TestVectorDateExpressions {
 
   private void verifyUDFUnixTimeStamp(VectorizedRowBatch batch) {
     VectorExpression udf;
-    udf = new VectorUDFUnixTimeStampLong(0, 1);
+    udf = new VectorUDFUnixTimeStampDate(0, 1);
     udf.setInputTypes(VectorExpression.Type.DATE);
     udf.evaluate(batch);
     final int in = 0;
@@ -362,7 +362,7 @@ public class TestVectorDateExpressions {
 
   private void verifyUDFWeekOfYear(VectorizedRowBatch batch) {
     VectorExpression udf;
-    udf = new VectorUDFWeekOfYearLong(0, 1);
+    udf = new VectorUDFWeekOfYearDate(0, 1);
     udf.setInputTypes(VectorExpression.Type.DATE);
     udf.evaluate(batch);
     final int in = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
index 6523e7b..02602f4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
@@ -25,14 +25,13 @@ import java.util.Random;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -84,6 +82,11 @@ public class TestVectorExpressionWriters {
     return null;
   }
 
+
+  private Writable getWritableValue(TypeInfo ti, Timestamp value) {
+    return new TimestampWritable(value);
+  }
+
   private Writable getWritableValue(TypeInfo ti, HiveDecimal value) {
     return new HiveDecimalWritable(value);
   }
@@ -113,7 +116,6 @@ public class TestVectorExpressionWriters {
       return new BooleanWritable( value == 0 ? false : true);
     } else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) {
       Timestamp ts = new Timestamp(value);
-      TimestampUtils.assignTimeInNanoSec(value, ts);
       TimestampWritable tw = new TimestampWritable(ts);
       return tw;
     }
@@ -199,13 +201,6 @@ public class TestVectorExpressionWriters {
       Writable w = (Writable) vew.writeValue(lcv, i);
       if (w != null) {
         Writable expected = getWritableValue(type, lcv.vector[i]);
-        if (expected instanceof TimestampWritable) {
-          TimestampWritable t1 = (TimestampWritable) expected;
-          TimestampWritable t2 = (TimestampWritable) w;
-          Assert.assertTrue(t1.getNanos() == t2.getNanos());
-          Assert.assertTrue(t1.getSeconds() == t2.getSeconds());
-          continue;
-        }
         Assert.assertEquals(expected, w);
       } else {
         Assert.assertTrue(lcv.isNull[i]);
@@ -226,20 +221,57 @@ public class TestVectorExpressionWriters {
       values[i] = vew.setValue(values[i], lcv, i);
       if (values[i] != null) {
         Writable expected = getWritableValue(type, lcv.vector[i]);
-        if (expected instanceof TimestampWritable) {
-          TimestampWritable t1 = (TimestampWritable) expected;
-          TimestampWritable t2 = (TimestampWritable) values[i];
-          Assert.assertTrue(t1.getNanos() == t2.getNanos());
-          Assert.assertTrue(t1.getSeconds() == t2.getSeconds());
-          continue;
-        }
         Assert.assertEquals(expected, values[i]);
       } else {
         Assert.assertTrue(lcv.isNull[i]);
       }
     }
   }
-  
+
+  private void testWriterTimestamp(TypeInfo type) throws HiveException {
+    Timestamp[] timestampValues = new Timestamp[vectorSize];
+    TimestampColumnVector tcv =
+        VectorizedRowGroupGenUtil.generateTimestampColumnVector(true, false,
+        vectorSize, new Random(10), timestampValues);
+    tcv.isNull[3] = true;
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      Writable w = (Writable) vew.writeValue(tcv, i);
+      if (w != null) {
+        Writable expected = getWritableValue(type, timestampValues[i]);
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) w;
+        Assert.assertTrue(t1.equals(t2));
+      } else {
+        Assert.assertTrue(tcv.isNull[i]);
+      }
+    }
+  }
+
+  private void testSetterTimestamp(TypeInfo type) throws HiveException {
+    Timestamp[] timestampValues = new Timestamp[vectorSize];
+    TimestampColumnVector tcv =
+        VectorizedRowGroupGenUtil.generateTimestampColumnVector(true, false,
+        vectorSize, new Random(10), timestampValues);
+    tcv.isNull[3] = true;
+
+    Object[] values = new Object[this.vectorSize];
+
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = null;  // setValue() should be able to handle null input
+      values[i] = vew.setValue(values[i], tcv, i);
+      if (values[i] != null) {
+        Writable expected = getWritableValue(type, timestampValues[i]);
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) values[i];
+        Assert.assertTrue(t1.equals(t2));
+      } else {
+        Assert.assertTrue(tcv.isNull[i]);
+      }
+    }
+  }
+
   private StructObjectInspector genStructOI() {
     ArrayList<String> fieldNames1 = new ArrayList<String>();
     fieldNames1.add("theInt");
@@ -427,14 +459,14 @@ public class TestVectorExpressionWriters {
 
   @Test
   public void testVectorExpressionWriterTimestamp() throws HiveException {
-    testWriterLong(TypeInfoFactory.timestampTypeInfo);
+    testWriterTimestamp(TypeInfoFactory.timestampTypeInfo);
   }
 
   @Test
   public void testVectorExpressionSetterTimestamp() throws HiveException {
-    testSetterLong(TypeInfoFactory.timestampTypeInfo);
+    testSetterTimestamp(TypeInfoFactory.timestampTypeInfo);
   }
-  
+
   @Test
   public void testVectorExpressionWriterByte() throws HiveException {
     testWriterLong(TypeInfoFactory.byteTypeInfo);
@@ -469,67 +501,9 @@ public class TestVectorExpressionWriters {
   public void testVectorExpressionWriterBinary() throws HiveException {
     testWriterText(TypeInfoFactory.binaryTypeInfo);
   }
-  
+
   @Test
   public void testVectorExpressionSetterBinary() throws HiveException {
     testSetterText(TypeInfoFactory.binaryTypeInfo);
   }
-
-  @Test
-  public void testTimeStampUtils(){
-    Timestamp ts = new Timestamp(0);
-
-    // Convert positive nanoseconds to timestamp object.
-    TimestampUtils.assignTimeInNanoSec(1234567891, ts);
-    Assert.assertEquals(234567891, ts.getNanos());
-    Assert.assertEquals(1234567891, TimestampUtils.getTimeNanoSec(ts));
-
-    // Test negative nanoseconds
-    TimestampUtils.assignTimeInNanoSec(-1234567891, ts);
-    Assert.assertEquals((1000000000-234567891), ts.getNanos());
-    Assert.assertEquals(-1234567891, TimestampUtils.getTimeNanoSec(ts));
-
-    // Test positive value smaller than a second.
-    TimestampUtils.assignTimeInNanoSec(234567891, ts);
-    Assert.assertEquals(234567891, ts.getNanos());
-    Assert.assertEquals(234567891, TimestampUtils.getTimeNanoSec(ts));
-
-    // Test negative value smaller than a second.
-    TimestampUtils.assignTimeInNanoSec(-234567891, ts);
-    Assert.assertEquals((1000000000-234567891), ts.getNanos());
-    Assert.assertEquals(-234567891, TimestampUtils.getTimeNanoSec(ts));
-
-    // Test a positive long timestamp
-    long big = 152414813551296L;
-    TimestampUtils.assignTimeInNanoSec(big, ts);
-    Assert.assertEquals(big % 1000000000, ts.getNanos());
-    Assert.assertEquals(big, TimestampUtils.getTimeNanoSec(ts));
-
-    // Test a negative long timestamp
-    big = -152414813551296L;
-    TimestampUtils.assignTimeInNanoSec(big, ts);
-    Assert.assertEquals((1000000000 + (big % 1000000000)), ts.getNanos());
-    Assert.assertEquals(big, TimestampUtils.getTimeNanoSec(ts));
-
-    // big/1000000 will yield zero nanoseconds
-    big = -1794750230000828416L;
-    ts = new Timestamp(0);
-    TimestampUtils.assignTimeInNanoSec(big, ts);
-    Assert.assertEquals((1000000000 + big % 1000000000), ts.getNanos());
-    Assert.assertEquals(big, TimestampUtils.getTimeNanoSec(ts));
-
-    // Very small nanosecond part
-    big = 1700000000000000016L;
-    ts = new Timestamp(0);
-    TimestampUtils.assignTimeInNanoSec(big, ts);
-    Assert.assertEquals(big % 1000000000, ts.getNanos());
-    Assert.assertEquals(big, TimestampUtils.getTimeNanoSec(ts));
-
-    // Very small nanosecond part
-    big = -1700000000000000016L;
-    ts = new Timestamp(0);
-    TimestampUtils.assignTimeInNanoSec(big, ts);
-    Assert.assertEquals((1000000000 + big % 1000000000), ts.getNanos());
-    Assert.assertEquals(big, TimestampUtils.getTimeNanoSec(ts));
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
index 3841317..80f55dc 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColGreaterEqualDecimalColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColLessDecimalScalar;
@@ -49,6 +49,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnN
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalScalarEqualDecimalColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterTimestampColumnBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterTimestampColumnNotBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
 import org.junit.Assert;
@@ -586,23 +588,23 @@ public class TestVectorFilterExpressions {
 
   @Test
   public void testFilterTimestampBetween() {
-    int seed = 17;
-    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
-        5, 2, seed);
-    LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];
-    long startTS = 0; // the epoch
-    long endTS = TimestampUtils.getTimeNanoSec(
-        Timestamp.valueOf("2013-11-05 00:00:00.000000000"));
+
+    VectorizedRowBatch vrb = new VectorizedRowBatch(1);
+    vrb.cols[0] = new TimestampColumnVector();
+
+    TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0];
+    Timestamp startTS = new Timestamp(0); // the epoch
+    Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000");
 
     Timestamp ts0 = Timestamp.valueOf("1963-11-06 00:00:00.000");
-    lcv0.vector[0] = TimestampUtils.getTimeNanoSec(ts0);
+    lcv0.set(0, ts0);
     Timestamp ts1 = Timestamp.valueOf("1983-11-06 00:00:00.000");
-    lcv0.vector[1] = TimestampUtils.getTimeNanoSec(ts1);
+    lcv0.set(1, ts1);
     Timestamp ts2 = Timestamp.valueOf("2099-11-06 00:00:00.000");
-    lcv0.vector[2] = TimestampUtils.getTimeNanoSec(ts2);
+    lcv0.set(2, ts2);
     vrb.size = 3;
 
-    VectorExpression expr1 = new FilterLongColumnBetween(0, startTS, endTS);
+    VectorExpression expr1 = new FilterTimestampColumnBetween(0, startTS, endTS);
     expr1.evaluate(vrb);
     assertEquals(1, vrb.size);
     assertEquals(true, vrb.selectedInUse);
@@ -611,24 +613,22 @@ public class TestVectorFilterExpressions {
 
   @Test
   public void testFilterTimestampNotBetween() {
-    int seed = 17;
-    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
-        5, 2, seed);
-    LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];
-    long startTS = TimestampUtils.getTimeNanoSec(
-        Timestamp.valueOf("2013-11-05 00:00:00.000000000"));
-    long endTS = TimestampUtils.getTimeNanoSec(
-        Timestamp.valueOf("2013-11-05 00:00:00.000000010"));
+    VectorizedRowBatch vrb = new VectorizedRowBatch(1);
+    vrb.cols[0] = new TimestampColumnVector();
+
+    TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0];
+    Timestamp startTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000");
+    Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000010");
 
     Timestamp ts0 = Timestamp.valueOf("2013-11-04 00:00:00.000000000");
-    lcv0.vector[0] = TimestampUtils.getTimeNanoSec(ts0);
+    lcv0.set(0, ts0);
     Timestamp ts1 = Timestamp.valueOf("2013-11-05 00:00:00.000000002");
-    lcv0.vector[1] = TimestampUtils.getTimeNanoSec(ts1);
+    lcv0.set(1, ts1);
     Timestamp ts2 = Timestamp.valueOf("2099-11-06 00:00:00.000");
-    lcv0.vector[2] = TimestampUtils.getTimeNanoSec(ts2);
+    lcv0.set(2, ts2);
     vrb.size = 3;
 
-    VectorExpression expr1 = new FilterLongColumnNotBetween(0, startTS, endTS);
+    VectorExpression expr1 = new FilterTimestampColumnNotBetween(0, startTS, endTS);
     expr1.evaluate(vrb);
     assertEquals(2, vrb.size);
     assertEquals(true, vrb.selectedInUse);

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
index 74f4671..3f2b031 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.junit.Assert;
@@ -29,6 +30,7 @@ import org.junit.Test;
 
 import java.io.UnsupportedEncodingException;
 import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.List;
@@ -53,21 +55,21 @@ public class TestVectorGenericDateExpressions {
     return vector;
   }
 
-  private LongColumnVector toTimestamp(LongColumnVector date) {
-    LongColumnVector vector = new LongColumnVector(size);
+  private TimestampColumnVector toTimestamp(LongColumnVector date) {
+    TimestampColumnVector vector = new TimestampColumnVector(size);
     for (int i = 0; i < size; i++) {
       if (date.isNull[i]) {
         vector.isNull[i] = true;
         vector.noNulls = false;
       } else {
-        vector.vector[i] = toTimestamp(date.vector[i]);
+        vector.set(i, toTimestamp(date.vector[i]));
       }
     }
     return vector;
   }
 
-  private long toTimestamp(long date) {
-    return DateWritable.daysToMillis((int) date) * 1000000;
+  private Timestamp toTimestamp(long date) {
+    return new Timestamp(DateWritable.daysToMillis((int) date));
   }
 
   private BytesColumnVector toString(LongColumnVector date) {
@@ -474,7 +476,7 @@ public class TestVectorGenericDateExpressions {
     }
     VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
 
-    udf = new VectorUDFDateDiffScalarCol(0, 0, 1);
+    udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1);
     udf.setInputTypes(VectorExpression.Type.TIMESTAMP, VectorExpression.Type.STRING);
     batch.cols[0] = new BytesColumnVector(1);
     batch.cols[1] = new LongColumnVector(1);
@@ -615,7 +617,7 @@ public class TestVectorGenericDateExpressions {
 
     udf.setInputTypes(VectorExpression.Type.STRING, VectorExpression.Type.TIMESTAMP);
     batch.cols[0] = new BytesColumnVector(1);
-    batch.cols[1] = new LongColumnVector(1);
+    batch.cols[1] = new TimestampColumnVector(1);
     batch.cols[2] = new LongColumnVector(1);
     bcv = (BytesColumnVector) batch.cols[0];
     bcv.vector[0] = bytes;
@@ -625,7 +627,7 @@ public class TestVectorGenericDateExpressions {
     Assert.assertEquals(batch.cols[2].isNull[0], true);
 
     udf.setInputTypes(VectorExpression.Type.TIMESTAMP, VectorExpression.Type.STRING);
-    batch.cols[0] = new LongColumnVector(1);
+    batch.cols[0] = new TimestampColumnVector(1);
     batch.cols[1] = new BytesColumnVector(1);
     batch.cols[2] = new LongColumnVector(1);
     bcv = (BytesColumnVector) batch.cols[1];
@@ -640,6 +642,8 @@ public class TestVectorGenericDateExpressions {
     VectorExpression udf;
     if (colType == VectorExpression.Type.STRING) {
       udf = new VectorUDFDateString(0, 1);
+    } else if (colType == VectorExpression.Type.TIMESTAMP) {
+      udf = new VectorUDFDateTimestamp(0, 1);
     } else {
       udf = new VectorUDFDateLong(0, 1);
     }
@@ -708,6 +712,8 @@ public class TestVectorGenericDateExpressions {
         colType == VectorExpression.Type.CHAR ||
         colType == VectorExpression.Type.VARCHAR) {
       udf = new CastStringToDate(0, 1);
+    } else if (colType == VectorExpression.Type.TIMESTAMP) {
+      udf = new CastTimestampToDate(0, 1);
     } else {
       udf = new CastLongToDate(0, 1);
     }

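The toTimestamp() change above captures the core representation change of HIVE-9862: vectorized timestamps are no longer nanosecond longs in a LongColumnVector but real Timestamp values held by a TimestampColumnVector. A before/after sketch of the two encodings, with an arbitrary day count:

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DaysToTimestampSketch {
      public static void main(String[] args) {
        int days = 100;                                 // arbitrary days-since-epoch value
        long millis = DateWritable.daysToMillis(days);  // midnight of that day in local time
        long oldEncoding = millis * 1000000L;           // old: nanos packed into a long vector slot
        Timestamp newEncoding = new Timestamp(millis);  // new: a Timestamp set into a TimestampColumnVector
        System.out.println(oldEncoding + " -> " + newEncoding);
      }
    }
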
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
index 77365a8..31add6e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
@@ -19,13 +19,17 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.io.UnsupportedEncodingException;
+import java.sql.Timestamp;
 import java.util.Arrays;
+import java.util.Random;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinDoubleToDouble;
@@ -50,6 +54,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSignLongToDoubl
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.junit.Test;
 
 
@@ -130,6 +135,27 @@ public class TestVectorMathFunctions {
     return batch;
   }
 
+  public static VectorizedRowBatch getVectorizedRowBatchDoubleInTimestampOut() {
+    VectorizedRowBatch batch = new VectorizedRowBatch(2);
+    TimestampColumnVector tcv;
+    DoubleColumnVector dcv;
+    tcv = new TimestampColumnVector();
+    dcv = new DoubleColumnVector();
+    dcv.vector[0] = -1.5d;
+    dcv.vector[1] = -0.5d;
+    dcv.vector[2] = -0.1d;
+    dcv.vector[3] = 0d;
+    dcv.vector[4] = 0.5d;
+    dcv.vector[5] = 0.7d;
+    dcv.vector[6] = 1.5d;
+
+    batch.cols[0] = dcv;
+    batch.cols[1] = tcv;
+
+    batch.size = 7;
+    return batch;
+  }
+
   public static VectorizedRowBatch getVectorizedRowBatchDoubleInDoubleOut() {
     VectorizedRowBatch batch = new VectorizedRowBatch(2);
     DoubleColumnVector inV;
@@ -171,6 +197,25 @@ public class TestVectorMathFunctions {
     return batch;
   }
 
+  public static VectorizedRowBatch getVectorizedRowBatchTimestampInDoubleOut(double[] doubleValues) {
+    Random r = new Random(45993);
+    VectorizedRowBatch batch = new VectorizedRowBatch(2);
+    TimestampColumnVector tcv;
+    DoubleColumnVector dcv;
+    tcv = new TimestampColumnVector(doubleValues.length);
+    dcv = new DoubleColumnVector(doubleValues.length);
+    for (int i = 0; i < doubleValues.length; i++) {
+      doubleValues[i] = r.nextDouble() % (double) SECONDS_LIMIT;
+      dcv.vector[i] = doubleValues[i];
+    }
+
+    batch.cols[0] = tcv;
+    batch.cols[1] = dcv;
+
+    batch.size = doubleValues.length;
+    return batch;
+  }
+
   public static VectorizedRowBatch getVectorizedRowBatchLongInLongOut() {
     VectorizedRowBatch batch = new VectorizedRowBatch(2);
     LongColumnVector inV, outV;
@@ -186,6 +231,48 @@ public class TestVectorMathFunctions {
     return batch;
   }
 
+  public static VectorizedRowBatch getVectorizedRowBatchTimestampInLongOut(long[] longValues) {
+    Random r = new Random(345);
+    VectorizedRowBatch batch = new VectorizedRowBatch(2);
+    TimestampColumnVector inV;
+    LongColumnVector outV;
+    inV = new TimestampColumnVector(longValues.length);
+    outV = new LongColumnVector(longValues.length);
+    for (int i = 0; i < longValues.length; i++) {
+      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
+      longValues[i] = TimestampWritable.getLong(randTimestamp);
+      inV.set(i, randTimestamp);
+    }
+
+    batch.cols[0] = inV;
+    batch.cols[1] = outV;
+
+    batch.size = longValues.length;
+    return batch;
+  }
+
+  static long SECONDS_LIMIT = 60L * 24L * 365L * 9999L;
+
+  public static VectorizedRowBatch getVectorizedRowBatchLongInTimestampOut(long[] longValues) {
+    Random r = new Random(12099);
+    VectorizedRowBatch batch = new VectorizedRowBatch(2);
+    LongColumnVector inV;
+    TimestampColumnVector outV;
+    inV = new LongColumnVector();
+    outV = new TimestampColumnVector();
+
+    for (int i = 0; i < longValues.length; i++) {
+      longValues[i] = r.nextLong() % SECONDS_LIMIT;
+      inV.vector[i] = longValues[i];
+    }
+
+    batch.cols[0] = inV;
+    batch.cols[1] = outV;
+
+    batch.size = longValues.length;
+    return batch;
+  }
+
   public static VectorizedRowBatch getBatchForStringMath() {
     VectorizedRowBatch batch = new VectorizedRowBatch(3);
     LongColumnVector inL;


[14/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareScalar.txt
deleted file mode 100644
index da33281..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareScalar.txt
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-
-/**
- * Generated from template TimestampColumnCompareScalar.txt, which covers comparison 
- * expressions between a timestamp column and a long or double scalar. The boolean output
- * is stored in a separate boolean column.
- * Note: For timestamp and long or double we implicitly interpret the long as the number
- * of seconds or double as seconds and fraction since the epoch.
- */
-public class <ClassName> extends <BaseClassName> {
-
-  public <ClassName>(int colNum, <OperandType> value, int outputColumn) {
-    super(colNum, TimestampUtils.<TimestampScalarConversion>(value), outputColumn);
-  }
-
-  public <ClassName>() {
-    super();
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt
new file mode 100644
index 0000000..f9fc425
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampColumn.txt
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template TimestampColumnCompareTimestampColumn.txt, which covers comparison
+ * expressions between timestamp columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+     // Input #1 is type <OperandType>.
+    <InputColumnVectorType> inputColVector1 = (<InputColumnVectorType>) batch.cols[colNum1];
+
+     // Input #2 is type <OperandType>.
+    <InputColumnVectorType> inputColVector2 = (<InputColumnVectorType>) batch.cols[colNum2];
+
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      outputVector[0] = inputColVector1.compareTo(0, inputColVector2.asScratch<CamelOperandType>(0)) <OperatorSymbol> 0 ? 1 : 0;
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.compareTo(0, inputColVector2.asScratch<CamelOperandType>(i)) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.compareTo(0, inputColVector2.asScratch<CamelOperandType>(i)) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <HiveOperandType> value2 = inputColVector2.asScratch<CamelOperandType>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.compareTo(i, value2) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.compareTo(i, value2) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.compareTo(i, inputColVector2.asScratch<CamelOperandType>(i)) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.compareTo(i, inputColVector2.asScratch<CamelOperandType>(i)) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

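The control flow in the template above is driven by three batch-level flags: isRepeating (entry 0 stands for every row), noNulls, and selectedInUse (rows are visited indirectly through the sel array). A minimal sketch of that branching over plain long arrays follows; the class and helper names are illustrative only and not Hive API.

    // Minimal sketch of the column-column compare pattern above, using plain
    // long[] columns instead of the generated <InputColumnVectorType>.
    public class ColColCompareSketch {

      // Stands in for "compareTo(...) <OperatorSymbol> 0"; "<" is chosen here.
      static long cmp(long a, long b) {
        return a < b ? 1 : 0;
      }

      static void evaluate(long[] col1, boolean rep1, long[] col2, boolean rep2,
          long[] out, int[] sel, boolean selectedInUse, int n) {
        if (rep1 && rep2) {
          // Both inputs repeat: one comparison covers the whole batch.
          out[0] = cmp(col1[0], col2[0]);
        } else if (rep1) {
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = cmp(col1[0], col2[i]);
          }
        } else if (rep2) {
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = cmp(col1[i], col2[0]);
          }
        } else {
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = cmp(col1[i], col2[i]);
          }
        }
      }

      public static void main(String[] args) {
        long[] c1 = {1, 5, 3};
        long[] c2 = {2, 0, 0};    // repeating: only entry 0 is meaningful
        long[] out = new long[3];
        evaluate(c1, false, c2, true, out, null, false, 3);
        System.out.println(java.util.Arrays.toString(out));  // [1, 0, 0]
      }
    }

The template unrolls the selectedInUse branch into separate loops instead of using the ternary above, trading code size for a branch-free inner loop.
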
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
index 46534b4..90701ec 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
@@ -15,27 +15,121 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
-
 /**
- * Generated from template TimestampColumnCompareTimestampScalar.txt, which covers comparison 
- * expressions between a timestamp column and a timestamp scalar. The boolean output
- * is stored in a separate boolean column.
+ * Generated from template TimestampColumnCompareTimestampScalar.txt, which covers binary comparison
+ * expressions between a column and a scalar. The boolean output is stored in a
+ * separate boolean column.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
 
-  public <ClassName>(int colNum, long value, int outputColumn) {
-    super(colNum, value, outputColumn);
+  private int colNum;
+  private <HiveOperandType> value;
+  private int outputColumn;
+
+  public <ClassName>(int colNum, <HiveOperandType> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type <OperandType>.
+    <InputColumnVectorType> inputColVector1 = (<InputColumnVectorType>) batch.cols[colNum];
+
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector1.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating) {
+        // All must be selected, otherwise size would be zero.
+        // The repeating property will not change.
+        outputVector[0] = inputColVector1.compareTo(0, value) <OperatorSymbol> 0 ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.compareTo(i, value) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.compareTo(i, value) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector1.isRepeating) {
+        // All must be selected, otherwise size would be zero.
+        // The repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = inputColVector1.compareTo(0, value) <OperatorSymbol> 0 ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector1.compareTo(i, value) <OperatorSymbol> 0 ? 1 : 0;
+            outNulls[i] = false;
+          } else {
+            // Comparison with null is null.
+            outNulls[i] = true;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector1.compareTo(i, value) <OperatorSymbol> 0 ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
   }
 
   @Override
@@ -45,8 +139,8 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR).build();

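The non-selected null path above copies the input null flags wholesale with System.arraycopy and then compares only the non-null rows, which implements SQL's "comparison with null is null" rule without per-row branching on the output side. A self-contained sketch with plain arrays (illustrative names, not Hive API):

    public class ColScalarNullSketch {
      public static void main(String[] args) {
        long[] col = {10, 0, 30};
        boolean[] isNull = {false, true, false};
        long scalar = 20;
        int n = col.length;

        long[] out = new long[n];
        boolean[] outNull = new boolean[n];

        // Copy the null flags wholesale: comparison with null is null.
        System.arraycopy(isNull, 0, outNull, 0, n);
        for (int i = 0; i != n; i++) {
          if (!isNull[i]) {
            out[i] = col[i] > scalar ? 1 : 0;  // ">" stands in for <OperatorSymbol>
          }
        }

        for (int i = 0; i != n; i++) {
          System.out.println(outNull[i] ? "NULL" : Long.toString(out[i]));
        }
        // Prints: 0, NULL, 1
      }
    }
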
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
new file mode 100644
index 0000000..f958be8
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template TimestampScalarArithmeticDateColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType1> value;
+  private int outputColumn;
+  private Timestamp scratchTimestamp2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(<HiveOperandType1> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+    scratchTimestamp2 = new Timestamp(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type date.
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.<OperatorMethod>(
+          value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

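The scratchTimestamp2 field above exists because the date column stores epoch days, which must be widened into a Timestamp before the arithmetic; one mutable Timestamp is reused for every row. A sketch of that widening follows. Plain "days * millis-per-day" math is used here; the real DateWritable.daysToMillis also compensates for the local time zone, so this is only an approximation of that call.

    import java.sql.Timestamp;

    public class DaysToTimestampSketch {
      private static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

      public static void main(String[] args) {
        long[] dateDays = {0, 1, 365};         // stands in for inputColVector2.vector
        Timestamp scratch = new Timestamp(0);  // allocated once, reused for every row

        for (int i = 0; i < dateDays.length; i++) {
          scratch.setTime(dateDays[i] * MILLIS_PER_DAY);
          // The widened value would now feed dtm.<OperatorMethod>(...).
          System.out.println(scratch.getTime()); // 0, 86400000, 31536000000
        }
      }
    }
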
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
new file mode 100644
index 0000000..585027a
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampScalarArithmeticIntervalYearMonthColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(Timestamp value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type Interval_Year_Month (months).
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          value, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+             value, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+             value, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+             value, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+             value, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

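Here the interval column stores a month count, so scratchIntervalYearMonth2 is re-set per row and DateTimeMath performs the timestamp-plus-months arithmetic. The sketch below only approximates that with java.util.Calendar (millisecond precision, no nanosecond carry); it is not the DateTimeMath implementation.

    import java.sql.Timestamp;
    import java.util.Calendar;

    public class TimestampPlusMonthsSketch {
      static Timestamp addMonths(Timestamp ts, int months) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ts.getTime());
        // Month arithmetic clamps the day-of-month (Jan 31 + 1 month -> Feb 28/29).
        cal.add(Calendar.MONTH, months);
        return new Timestamp(cal.getTimeInMillis());
      }

      public static void main(String[] args) {
        Timestamp t = Timestamp.valueOf("2016-01-31 10:00:00");
        System.out.println(addMonths(t, 1));  // 2016-02-29 10:00:00.0 (leap-year clamp)
      }
    }
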
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..996c86a
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampScalarArithmeticTimestampColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType1> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(<HiveOperandType1> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      dtm.<OperatorMethod>(
+          value, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

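The getScratch<CamelReturnType>() / setFromScratch<CamelReturnType>(i) pairs above route every result through one mutable scratch object owned by the output vector, so no Timestamp or interval object is allocated per row. A sketch of that pattern under assumed names (ScratchSink is illustrative, not a Hive class):

    import java.sql.Timestamp;

    public class ScratchReuseSketch {
      static class ScratchSink {
        private final Timestamp scratch = new Timestamp(0);  // allocated once
        private final long[] millis;
        ScratchSink(int size) { millis = new long[size]; }

        Timestamp getScratchTimestamp() { return scratch; }
        // Copies the value out of the scratch object into row i's storage.
        void setFromScratchTimestamp(int i) { millis[i] = scratch.getTime(); }
        long get(int i) { return millis[i]; }
      }

      public static void main(String[] args) {
        ScratchSink sink = new ScratchSink(3);
        for (int i = 0; i < 3; i++) {
          sink.getScratchTimestamp().setTime(1000L * i);  // operator writes into scratch
          sink.setFromScratchTimestamp(i);                // vector copies the value, not the object
        }
        System.out.println(sink.get(2));                  // 2000
      }
    }
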
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt
new file mode 100644
index 0000000..6815b5b
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareLongDoubleColumn.txt
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template TimestampScalarCompareLongDoubleColumn.txt, which covers binary
+ * comparison expressions between a timestamp scalar and a long/double column. The boolean
+ * output is stored in a separate boolean column.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  private static final long serialVersionUID = 1L;
+
+  public <ClassName>(Timestamp value, int colNum, int outputColumn) {
+    super(TimestampColumnVector.<GetTimestampLongDoubleMethod>(value), colNum, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

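The constructor above converts the Timestamp scalar into the long/double domain of the column once, via the <GetTimestampLongDoubleMethod> placeholder, so the inherited comparison loops stay purely numeric. The conversions below are plausible stand-ins for what such a method computes, not the exact Hive methods.

    import java.sql.Timestamp;

    public class TimestampToNumericSketch {
      static long asEpochSeconds(Timestamp ts) {
        return Math.floorDiv(ts.getTime(), 1000L);
      }

      static double asFractionalSeconds(Timestamp ts) {
        return asEpochSeconds(ts) + ts.getNanos() / 1e9;
      }

      public static void main(String[] args) {
        Timestamp t = new Timestamp(1500);           // 1.5 seconds past the epoch
        System.out.println(asEpochSeconds(t));       // 1
        System.out.println(asFractionalSeconds(t));  // 1.5
      }
    }
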
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
index 9468a66..6506c93 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
@@ -15,34 +15,123 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.udf.UDFToString;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import java.sql.Timestamp;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.io.LongWritable;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
- * Generated from template TimestampScalarCompareTimestampColumn.txt, which covers comparison 
- * expressions between a timestamp column and a timestamp scalar. The boolean output
- * is stored in a separate boolean column.
+ * Generated from template TimestampScalarCompareTimestampColumn.txt, which covers comparison
+ * expressions between a timestamp scalar and a timestamp column. The boolean output is stored
+ * in a separate boolean column.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType> value;
+  private int outputColumn;
 
-  public <ClassName>(long value, int colNum, int outputColumn) {
-    super(value, colNum, outputColumn);
+  public <ClassName>(<HiveOperandType> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type <OperandType>.
+    <InputColumnVectorType> inputColVector2 = (<InputColumnVectorType>) batch.cols[colNum];
+
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector2.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    if (inputColVector2.noNulls) {
+      if (inputColVector2.isRepeating) {
+        // All must be selected, otherwise size would be zero.
+        // The repeating property will not change.
+        outputVector[0] = inputColVector2.compareTo(value, 0) <OperatorSymbol> 0 ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector2.compareTo(value, i) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector2.compareTo(value, i) <OperatorSymbol> 0 ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector2.isRepeating) {
+        // All must be selected, otherwise size would be zero.
+        // The repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = inputColVector2.compareTo(value, 0) <OperatorSymbol> 0 ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector2.compareTo(value, i) <OperatorSymbol> 0 ? 1 : 0;
+            outNulls[i] = false;
+          } else {
+            // Comparison with null is null.
+            outNulls[i] = true;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector2.compareTo(value, i) <OperatorSymbol> 0 ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
   }
 
   @Override
@@ -52,8 +141,8 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.SCALAR,
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();

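Note the operand-order detail: the column-compare-scalar template calls inputColVector1.compareTo(i, value), while this scalar-compare-column template calls inputColVector2.compareTo(value, i), so the generated <OperatorSymbol> always sees its operands in SQL order and the same symbol serves both templates. A tiny sketch with plain longs standing in for vector entries:

    public class OperandOrderSketch {
      static int compareTo(long left, long right) {
        return Long.compare(left, right);
      }

      public static void main(String[] args) {
        long scalar = 5, columnEntry = 7;
        // scalar < column  <=>  compareTo(scalar, columnEntry) < 0
        System.out.println(compareTo(scalar, columnEntry) < 0);   // true
        // column < scalar  <=>  compareTo(columnEntry, scalar) < 0
        System.out.println(compareTo(columnEntry, scalar) < 0);   // false
      }
    }
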
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
index 57a0e5d..a9a3b6d 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>;

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
new file mode 100644
index 0000000..3cdf7e2
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
@@ -0,0 +1,454 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
+/**
+* <ClassName>. Vectorized implementation for MIN/MAX aggregates.
+*/
+@Description(name = "<DescriptionName>",
+    value = "<DescriptionValue>")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * class for storing the current aggregate value.
+     */
+    static private final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private final HiveIntervalDayTime value;
+
+      /**
+      * Value is explicitly (re)initialized in reset()
+      */
+      transient private boolean isNull = true;
+
+      public Aggregation() {
+        value = new HiveIntervalDayTime();
+      }
+
+      public void checkValue(IntervalDayTimeColumnVector colVector, int index) {
+        if (isNull) {
+          isNull = false;
+          colVector.intervalDayTimeUpdate(this.value, index);
+        } else if (colVector.compareTo(this.value, index) <OperatorSymbol> 0) {
+          colVector.intervalDayTimeUpdate(this.value, index);
+        }
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        this.value.set(0, 0);
+      }
+    }
+
+    private VectorExpression inputExpression;
+    private transient VectorExpressionWriter resultWriter;
+
+    public <ClassName>(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public <ClassName>() {
+      super();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregrateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregrateIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      IntervalDayTimeColumnVector inputColVector = (IntervalDayTimeColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      if (inputColVector.noNulls) {
+        if (inputColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, aggregrateIndex,
+            inputColVector, batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize);
+          }
+        }
+      } else {
+        if (inputColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        // Repeating use index 0.
+        myagg.checkValue(inputColVector, 0);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        myagg.checkValue(inputColVector, selection[i]);
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        myagg.checkValue(inputColVector, i);
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[selection[i]]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          // Repeating use index 0.
+          myagg.checkValue(inputColVector, 0);
+        }
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          // Repeating use index 0.
+          myagg.checkValue(inputColVector, 0);
+        }
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selection[j];
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            j);
+          myagg.checkValue(inputColVector, i);
+        }
+      }
+    }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      IntervalDayTimeColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          myagg.checkValue(inputColVector, i);
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+      throws HiveException {
+
+        inputExpression.evaluate(batch);
+
+        IntervalDayTimeColumnVector inputColVector = (IntervalDayTimeColumnVector)batch.
+            cols[this.inputExpression.getOutputColumn()];
+
+        int batchSize = batch.size;
+
+        if (batchSize == 0) {
+          return;
+        }
+
+        Aggregation myagg = (Aggregation)agg;
+
+        if (inputColVector.isRepeating) {
+          if (inputColVector.noNulls &&
+            (myagg.isNull || (inputColVector.compareTo(myagg.value, 0) <OperatorSymbol> 0))) {
+            myagg.isNull = false;
+            inputColVector.intervalDayTimeUpdate(myagg.value, 0);
+          }
+          return;
+        }
+
+        if (!batch.selectedInUse && inputColVector.noNulls) {
+          iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+        }
+        else if (!batch.selectedInUse) {
+          iterateNoSelectionHasNulls(myagg, inputColVector,
+            batchSize, inputColVector.isNull);
+        }
+        else if (inputColVector.noNulls){
+          iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+        }
+        else {
+          iterateSelectionHasNulls(myagg, inputColVector,
+            batchSize, inputColVector.isNull, batch.selected);
+        }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        IntervalDayTimeColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            inputColVector.intervalDayTimeUpdate(myagg.value, i);
+          }
+          else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+            inputColVector.intervalDayTimeUpdate(myagg.value, i);
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        IntervalDayTimeColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        inputColVector.intervalDayTimeUpdate(myagg.value, selected[0]);
+        myagg.isNull = false;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        int sel = selected[i];
+        if (inputColVector.compareTo(myagg.value, sel) <OperatorSymbol> 0) {
+          inputColVector.intervalDayTimeUpdate(myagg.value, sel);
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        IntervalDayTimeColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          if (myagg.isNull) {
+            inputColVector.intervalDayTimeUpdate(myagg.value, i);
+            myagg.isNull = false;
+          }
+          else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+            inputColVector.intervalDayTimeUpdate(myagg.value, i);
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        IntervalDayTimeColumnVector inputColVector,
+        int batchSize) {
+      if (myagg.isNull) {
+        inputColVector.intervalDayTimeUpdate(myagg.value, 0);
+        myagg.isNull = false;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+          inputColVector.intervalDayTimeUpdate(myagg.value, i);
+        }
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        return resultWriter.writeValue(myagg.value);
+      }
+    }
+
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return resultWriter.getObjectInspector();
+    }
+
+    @Override
+    public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2(),
+        model.memoryAlign());
+    }
+
+    public VectorExpression getInputExpression() {
+      return inputExpression;
+    }
+
+    public void setInputExpression(VectorExpression inputExpression) {
+      this.inputExpression = inputExpression;
+    }
+}
+

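The Aggregation class above is the core of the MIN/MAX templates: a mutable value plus an isNull flag, where the first non-null input seeds the buffer and later inputs overwrite it only when the comparison (the generated <OperatorSymbol>) favors them. A sketch of that pattern with longs standing in for HiveIntervalDayTime (names illustrative, not Hive API):

    public class MinMaxBufferSketch {
      static class Aggregation {
        long value;
        boolean isNull = true;

        void checkValue(long candidate) {
          if (isNull) {
            isNull = false;
            value = candidate;            // first non-null value seeds the aggregate
          } else if (candidate < value) { // "<" plays the role of <OperatorSymbol> for MIN
            value = candidate;
          }
        }
      }

      public static void main(String[] args) {
        Aggregation agg = new Aggregation();
        long[] column = {7, 3, 9};
        boolean[] isNull = {false, false, false};
        for (int i = 0; i < column.length; i++) {
          if (!isNull[i]) {
            agg.checkValue(column[i]);
          }
        }
        System.out.println(agg.isNull ? "NULL" : Long.toString(agg.value)); // 3
      }
    }
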
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
new file mode 100644
index 0000000..7e34965
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -0,0 +1,456 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+/**
+* <ClassName>. Vectorized implementation for MIN/MAX aggregates.
+*/
+@Description(name = "<DescriptionName>",
+    value = "<DescriptionValue>")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * class for storing the current aggregate value.
+     */
+    static private final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private final Timestamp value;
+
+      /**
+      * Value is explicitly (re)initialized in reset()
+      */
+      transient private boolean isNull = true;
+
+      public Aggregation() {
+        value = new Timestamp(0);
+      }
+
+      public void checkValue(TimestampColumnVector colVector, int index) {
+        if (isNull) {
+          isNull = false;
+          colVector.timestampUpdate(this.value, index);
+        } else if (colVector.compareTo(this.value, index) <OperatorSymbol> 0) {
+          colVector.timestampUpdate(this.value, index);
+        }
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset() {
+        isNull = true;
+        this.value.setTime(0);
+      }
+    }
+
+    private VectorExpression inputExpression;
+    private transient VectorExpressionWriter resultWriter;
+
+    public <ClassName>(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public <ClassName>() {
+      super();
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      resultWriter = VectorExpressionWriterFactory.genVectorExpressionWritable(
+          desc.getParameters().get(0));
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregrateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregrateIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      if (inputColVector.noNulls) {
+        if (inputColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, aggregrateIndex,
+            inputColVector, batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize);
+          }
+        }
+      } else {
+        if (inputColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, aggregrateIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        // Repeating, so use index 0.
+        myagg.checkValue(inputColVector, 0);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        myagg.checkValue(inputColVector, selection[i]);
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregrateIndex,
+          i);
+        myagg.checkValue(inputColVector, i);
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[selection[i]]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          // Repeating, so use index 0.
+          myagg.checkValue(inputColVector, 0);
+        }
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          // Repeating, so use index 0.
+          myagg.checkValue(inputColVector, 0);
+        }
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selection[j];
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            j);
+          myagg.checkValue(inputColVector, i);
+        }
+      }
+    }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregrateIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregrateIndex,
+            i);
+          myagg.checkValue(inputColVector, i);
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+      throws HiveException {
+
+        inputExpression.evaluate(batch);
+
+        TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+            cols[this.inputExpression.getOutputColumn()];
+
+        int batchSize = batch.size;
+
+        if (batchSize == 0) {
+          return;
+        }
+
+        Aggregation myagg = (Aggregation)agg;
+
+        if (inputColVector.isRepeating) {
+          if (inputColVector.noNulls &&
+            (myagg.isNull || (inputColVector.compareTo(myagg.value, 0) <OperatorSymbol> 0))) {
+            myagg.isNull = false;
+            inputColVector.timestampUpdate(myagg.value, 0);
+          }
+          return;
+        }
+
+        if (!batch.selectedInUse && inputColVector.noNulls) {
+          iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+        }
+        else if (!batch.selectedInUse) {
+          iterateNoSelectionHasNulls(myagg, inputColVector,
+            batchSize, inputColVector.isNull);
+        }
+        else if (inputColVector.noNulls){
+          iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+        }
+        else {
+          iterateSelectionHasNulls(myagg, inputColVector,
+            batchSize, inputColVector.isNull, batch.selected);
+        }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            inputColVector.timestampUpdate(myagg.value, i);
+          }
+          else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+            inputColVector.timestampUpdate(myagg.value, i);
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        inputColVector.timestampUpdate(myagg.value, selected[0]);
+        myagg.isNull = false;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        int sel = selected[i];
+        if (inputColVector.compareTo(myagg.value, sel) <OperatorSymbol> 0) {
+          inputColVector.timestampUpdate(myagg.value, sel);
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          if (myagg.isNull) {
+            inputColVector.timestampUpdate(myagg.value, i);
+            myagg.isNull = false;
+          }
+          else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+            inputColVector.timestampUpdate(myagg.value, i);
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+      if (myagg.isNull) {
+        inputColVector.timestampUpdate(myagg.value, 0);
+        myagg.isNull = false;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
+          inputColVector.timestampUpdate(myagg.value, i);
+        }
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        return resultWriter.writeValue(myagg.value);
+      }
+    }
+
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return resultWriter.getObjectInspector();
+    }
+
+    @Override
+    public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2(),
+        model.memoryAlign());
+    }
+
+    public VectorExpression getInputExpression() {
+      return inputExpression;
+    }
+
+    public void setInputExpression(VectorExpression inputExpression) {
+      this.inputExpression = inputExpression;
+    }
+}
+
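
The <ClassName>, <DescriptionName>, <DescriptionValue> and <OperatorSymbol> tokens above are placeholders that the build-time vectorization code generator fills in for each concrete MIN/MAX aggregate. As a minimal sketch of an instantiation (an assumed substitution, not actual generator output): substituting '>' for <OperatorSymbol> gives MIN semantics here, because compareTo(this.value, index) compares the buffered value to the row value (mirroring the compareTo(value, elementNum) overload in IntervalDayTimeColumnVector below), so a positive result means the row value is smaller and should replace the buffer; '<' gives MAX.

    // Sketch of checkValue after substituting '>' for <OperatorSymbol> (MIN);
    // substituting '<' would yield MAX.
    public void checkValue(TimestampColumnVector colVector, int index) {
      if (isNull) {
        isNull = false;
        colVector.timestampUpdate(this.value, index);
      } else if (colVector.compareTo(this.value, index) > 0) {
        colVector.timestampUpdate(this.value, index);
      }
    }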

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ColumnVector.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ColumnVector.java
index fcb1ae9..c069a5f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ColumnVector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ColumnVector.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import java.io.IOException;
 import java.util.Arrays;
 
 /**
@@ -42,6 +41,8 @@ public abstract class ColumnVector {
     DOUBLE,
     BYTES,
     DECIMAL,
+    TIMESTAMP,
+    INTERVAL_DAY_TIME,
     STRUCT,
     LIST,
     MAP,
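
With TIMESTAMP and INTERVAL_DAY_TIME added to the Type enum, callers have a tag to dispatch on when allocating or inspecting column vectors. A minimal dispatch sketch under that assumption (the allocate helper below is hypothetical, not part of this patch):

    // Hypothetical helper, for illustration only: allocate the vector class
    // matching a ColumnVector.Type tag, including the two new values.
    static ColumnVector allocate(ColumnVector.Type type, int size) {
      switch (type) {
        case LONG:
          return new LongColumnVector(size);
        case TIMESTAMP:
          return new TimestampColumnVector(size);
        case INTERVAL_DAY_TIME:
          return new IntervalDayTimeColumnVector(size);
        // ... remaining cases elided ...
        default:
          throw new IllegalArgumentException("Unsupported type: " + type);
      }
    }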

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/IntervalDayTimeColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/IntervalDayTimeColumnVector.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/IntervalDayTimeColumnVector.java
new file mode 100644
index 0000000..39ccea8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/IntervalDayTimeColumnVector.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * This class represents a nullable interval day time column vector capable of handling a
+ * wide range of interval day time values.
+ *
+ * We store the two value fields of a HiveIntervalDayTime in primitive arrays.
+ *
+ * We do this to avoid an array of Java HiveIntervalDayTime objects which would have poor storage
+ * and memory access characteristics.
+ *
+ * Generally, the caller will fill in a scratch HiveIntervalDayTime object with values from a row,
+ * work using the scratch HiveIntervalDayTime, and then perhaps update the column vector row
+ * with a result.
+ */
+public class IntervalDayTimeColumnVector extends ColumnVector {
+
+  /*
+   * The storage arrays for this column vector correspond to the storage of a HiveIntervalDayTime:
+   */
+  private long[] totalSeconds;
+      // The values from HiveIntervalDayTime.getTotalSeconds().
+
+  private int[] nanos;
+      // The values from HiveIntervalDayTime.getNanos().
+
+  /*
+   * Scratch objects.
+   */
+  private final HiveIntervalDayTime scratchIntervalDayTime;
+
+  private Writable scratchWritable;
+      // Supports keeping a HiveIntervalDayTimeWritable object without having to import
+      // that definition...
+
+  /**
+   * Use this constructor by default. All column vectors
+   * should normally be the default size.
+   */
+  public IntervalDayTimeColumnVector() {
+    this(VectorizedRowBatch.DEFAULT_SIZE);
+  }
+
+  /**
+   * Don't use this except for testing purposes.
+   *
+   * @param len the number of rows
+   */
+  public IntervalDayTimeColumnVector(int len) {
+    super(len);
+
+    totalSeconds = new long[len];
+    nanos = new int[len];
+
+    scratchIntervalDayTime = new HiveIntervalDayTime();
+
+    scratchWritable = null;     // Allocated by caller.
+  }
+
+  /**
+   * Return the number of rows.
+   * @return
+   */
+  public int getLength() {
+    return totalSeconds.length;
+  }
+
+  /**
+   * Return a row's HiveIntervalDayTime.getTotalSeconds() value.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public long getTotalSeconds(int elementNum) {
+    return totalSeconds[elementNum];
+  }
+
+  /**
+   * Return a row's HiveIntervalDayTime.getNanos() value.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public long getNanos(int elementNum) {
+    return nanos[elementNum];
+  }
+
+  /**
+   * Return a row's HiveIntervalDayTime.getDouble() value.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public double getDouble(int elementNum) {
+    return asScratchIntervalDayTime(elementNum).getDouble();
+  }
+
+  /**
+   * Set a HiveIntervalDayTime object from a row of the column.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param intervalDayTime
+   * @param elementNum
+   */
+  public void intervalDayTimeUpdate(HiveIntervalDayTime intervalDayTime, int elementNum) {
+    intervalDayTime.set(totalSeconds[elementNum], nanos[elementNum]);
+  }
+
+
+  /**
+   * Return the scratch HiveIntervalDayTime object set from a row.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public HiveIntervalDayTime asScratchIntervalDayTime(int elementNum) {
+    scratchIntervalDayTime.set(totalSeconds[elementNum], nanos[elementNum]);
+    return scratchIntervalDayTime;
+  }
+
+  /**
+   * Return the scratch HiveIntervalDayTime (contents undefined).
+   * @return
+   */
+  public HiveIntervalDayTime getScratchIntervalDayTime() {
+    return scratchIntervalDayTime;
+  }
+
+  /**
+   * Compare row to HiveIntervalDayTime.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @param intervalDayTime
+   * @return -1, 0, 1 standard compareTo values.
+   */
+  public int compareTo(int elementNum, HiveIntervalDayTime intervalDayTime) {
+    return asScratchIntervalDayTime(elementNum).compareTo(intervalDayTime);
+  }
+
+  /**
+   * Compare HiveIntervalDayTime to row.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param intervalDayTime
+   * @param elementNum
+   * @return -1, 0, 1 standard compareTo values.
+   */
+  public int compareTo(HiveIntervalDayTime intervalDayTime, int elementNum) {
+    return intervalDayTime.compareTo(asScratchIntervalDayTime(elementNum));
+  }
+
+  /**
+   * Compare a row to another IntervalDayTimeColumnVector's row.
+   * @param elementNum1
+   * @param intervalDayTimeColVector2
+   * @param elementNum2
+   * @return
+   */
+  public int compareTo(int elementNum1, IntervalDayTimeColumnVector intervalDayTimeColVector2,
+      int elementNum2) {
+    return asScratchIntervalDayTime(elementNum1).compareTo(
+        intervalDayTimeColVector2.asScratchIntervalDayTime(elementNum2));
+  }
+
+  /**
+   * Compare another IntervalDayTimeColumnVector's row to a row.
+   * @param intervalDayTimeColVector1
+   * @param elementNum1
+   * @param elementNum2
+   * @return
+   */
+  public int compareTo(IntervalDayTimeColumnVector intervalDayTimeColVector1, int elementNum1,
+      int elementNum2) {
+    return intervalDayTimeColVector1.asScratchIntervalDayTime(elementNum1).compareTo(
+        asScratchIntervalDayTime(elementNum2));
+  }
+
+  @Override
+  public void setElement(int outElementNum, int inputElementNum, ColumnVector inputVector) {
+
+    IntervalDayTimeColumnVector intervalDayTimeColVector = (IntervalDayTimeColumnVector) inputVector;
+
+    totalSeconds[outElementNum] = intervalDayTimeColVector.totalSeconds[inputElementNum];
+    nanos[outElementNum] = intervalDayTimeColVector.nanos[inputElementNum];
+  }
+
+  // Simplify vector by brute-force flattening noNulls and isRepeating
+  // This can be used to reduce combinatorial explosion of code paths in VectorExpressions
+  // with many arguments.
+  public void flatten(boolean selectedInUse, int[] sel, int size) {
+    flattenPush();
+    if (isRepeating) {
+      isRepeating = false;
+      long repeatFastTime = totalSeconds[0];
+      int repeatNanos = nanos[0];
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          totalSeconds[i] = repeatFastTime;
+          nanos[i] = repeatNanos;
+        }
+      } else {
+        Arrays.fill(totalSeconds, 0, size, repeatFastTime);
+        Arrays.fill(nanos, 0, size, repeatNanos);
+      }
+      flattenRepeatingNulls(selectedInUse, sel, size);
+    }
+    flattenNoNulls(selectedInUse, sel, size);
+  }
+
+  /**
+   * Set a row from a HiveIntervalDayTime.
+   * We assume the entry has already been isRepeated adjusted.
+   * @param elementNum
+   * @param intervalDayTime
+   */
+  public void set(int elementNum, HiveIntervalDayTime intervalDayTime) {
+    this.totalSeconds[elementNum] = intervalDayTime.getTotalSeconds();
+    this.nanos[elementNum] = intervalDayTime.getNanos();
+  }
+
+  /**
+   * Set a row from the current value in the scratch interval day time.
+   * @param elementNum
+   */
+  public void setFromScratchIntervalDayTime(int elementNum) {
+    this.totalSeconds[elementNum] = scratchIntervalDayTime.getTotalSeconds();
+    this.nanos[elementNum] = scratchIntervalDayTime.getNanos();
+  }
+
+  /**
+   * Set row to standard null value(s).
+   * We assume the entry has already been isRepeated adjusted.
+   * @param elementNum
+   */
+  public void setNullValue(int elementNum) {
+    totalSeconds[elementNum] = 0;
+    nanos[elementNum] = 1;
+  }
+
+  // Copy the current object contents into the output. Only copy selected entries,
+  // as indicated by selectedInUse and the sel array.
+  public void copySelected(
+      boolean selectedInUse, int[] sel, int size, IntervalDayTimeColumnVector output) {
+
+    // Output has nulls if and only if input has nulls.
+    output.noNulls = noNulls;
+    output.isRepeating = false;
+
+    // Handle repeating case
+    if (isRepeating) {
+      output.totalSeconds[0] = totalSeconds[0];
+      output.nanos[0] = nanos[0];
+      output.isNull[0] = isNull[0];
+      output.isRepeating = true;
+      return;
+    }
+
+    // Handle normal case
+
+    // Copy data values over
+    if (selectedInUse) {
+      for (int j = 0; j < size; j++) {
+        int i = sel[j];
+        output.totalSeconds[i] = totalSeconds[i];
+        output.nanos[i] = nanos[i];
+      }
+    }
+    else {
+      System.arraycopy(totalSeconds, 0, output.totalSeconds, 0, size);
+      System.arraycopy(nanos, 0, output.nanos, 0, size);
+    }
+
+    // Copy nulls over if needed
+    if (!noNulls) {
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          output.isNull[i] = isNull[i];
+        }
+      }
+      else {
+        System.arraycopy(isNull, 0, output.isNull, 0, size);
+      }
+    }
+  }
+
+  /**
+   * Fill all the vector entries with a HiveIntervalDayTime.
+   * @param intervalDayTime
+   */
+  public void fill(HiveIntervalDayTime intervalDayTime) {
+    noNulls = true;
+    isRepeating = true;
+    totalSeconds[0] = intervalDayTime.getTotalSeconds();
+    nanos[0] = intervalDayTime.getNanos();
+  }
+
+  /**
+   * Return a convenience writable object stored by this column vector.
+   * Supports keeping a HiveIntervalDayTimeWritable object without having to import that definition...
+   * @return
+   */
+  public Writable getScratchWritable() {
+    return scratchWritable;
+  }
+
+  /**
+   * Set the convenience writable object stored by this column vector
+   * @param scratchWritable
+   */
+  public void setScratchWritable(Writable scratchWritable) {
+    this.scratchWritable = scratchWritable;
+  }
+
+  @Override
+  public void stringifyValue(StringBuilder buffer, int row) {
+    if (isRepeating) {
+      row = 0;
+    }
+    if (noNulls || !isNull[row]) {
+      scratchIntervalDayTime.set(totalSeconds[row], nanos[row]);
+      buffer.append(scratchIntervalDayTime.toString());
+    } else {
+      buffer.append("null");
+    }
+  }
+}
\ No newline at end of file
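
A usage sketch of the scratch-object pattern described in the class comment (illustrative only: it assumes a simple batch and ignores nulls and isRepeating): each row is read into one reusable HiveIntervalDayTime, adjusted, and written back, avoiding a per-row object allocation.

    // Add one minute to each interval in a batch, reusing a single scratch
    // HiveIntervalDayTime instead of allocating one per row.
    static void addOneMinute(IntervalDayTimeColumnVector in,
        IntervalDayTimeColumnVector out, int batchSize) {
      HiveIntervalDayTime scratch = new HiveIntervalDayTime();
      for (int i = 0; i < batchSize; i++) {
        in.intervalDayTimeUpdate(scratch, i);   // fill scratch from row i
        scratch.set(scratch.getTotalSeconds() + 60, scratch.getNanos());
        out.set(i, scratch);                    // write the adjusted value back
      }
    }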


[02/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
new file mode 100644
index 0000000..059b7e8
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
@@ -0,0 +1,1026 @@
+PREHOOK: query: create table unique_timestamps (tsval timestamp) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@unique_timestamps
+POSTHOOK: query: create table unique_timestamps (tsval timestamp) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@unique_timestamps
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/timestamps.txt' OVERWRITE INTO TABLE unique_timestamps
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@unique_timestamps
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/timestamps.txt' OVERWRITE INTO TABLE unique_timestamps
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@unique_timestamps
+PREHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@interval_arithmetic_1
+PREHOOK: query: insert overwrite table interval_arithmetic_1
+  select cast(tsval as date), tsval from unique_timestamps
+PREHOOK: type: QUERY
+PREHOOK: Input: default@unique_timestamps
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: insert overwrite table interval_arithmetic_1
+  select cast(tsval as date), tsval from unique_timestamps
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@unique_timestamps
+POSTHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: Lineage: interval_arithmetic_1.dateval EXPRESSION [(unique_timestamps)unique_timestamps.FieldSchema(name:tsval, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: interval_arithmetic_1.tsval SIMPLE [(unique_timestamps)unique_timestamps.FieldSchema(name:tsval, type:timestamp, comment:null), ]
+_c0	tsval
+PREHOOK: query: -- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: -- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: dateval (type: date), (dateval - 2-2) (type: date), (dateval - -2-2) (type: date), (dateval + 2-2) (type: date), (dateval + -2-2) (type: date), (-2-2 + dateval) (type: date), (2-2 + dateval) (type: date)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: date)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: date), VALUE._col2 (type: date), VALUE._col3 (type: date), VALUE._col4 (type: date), VALUE._col5 (type: date)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3	c4	c5	c6
+0004-09-22	0002-07-22	0006-11-22	0006-11-22	0002-07-22	0002-07-22	0006-11-22
+0528-10-27	0526-08-27	0530-12-27	0530-12-27	0526-08-27	0526-08-27	0530-12-27
+1319-02-02	1316-12-02	1321-04-02	1321-04-02	1316-12-02	1316-12-02	1321-04-02
+1404-07-23	1402-05-23	1406-09-23	1406-09-23	1402-05-23	1402-05-23	1406-09-23
+1815-05-06	1813-03-06	1817-07-06	1817-07-06	1813-03-06	1813-03-06	1817-07-06
+1883-04-17	1881-02-17	1885-06-17	1885-06-17	1881-02-17	1881-02-17	1885-06-17
+1966-08-16	1964-06-16	1968-10-16	1968-10-16	1964-06-16	1964-06-16	1968-10-16
+1973-04-17	1971-02-17	1975-06-17	1975-06-17	1971-02-17	1971-02-17	1975-06-17
+1974-10-04	1972-08-04	1976-12-04	1976-12-04	1972-08-04	1972-08-04	1976-12-04
+1976-03-03	1974-01-03	1978-05-03	1978-05-03	1974-01-03	1974-01-03	1978-05-03
+1976-05-06	1974-03-06	1978-07-06	1978-07-06	1974-03-06	1974-03-06	1978-07-06
+1978-08-05	1976-06-05	1980-10-05	1980-10-05	1976-06-05	1976-06-05	1980-10-05
+1981-04-25	1979-02-25	1983-06-25	1983-06-25	1979-02-25	1979-02-25	1983-06-25
+1981-11-15	1979-09-15	1984-01-15	1984-01-15	1979-09-15	1979-09-15	1984-01-15
+1985-07-20	1983-05-20	1987-09-20	1987-09-20	1983-05-20	1983-05-20	1987-09-20
+1985-11-18	1983-09-18	1988-01-18	1988-01-18	1983-09-18	1983-09-18	1988-01-18
+1987-02-21	1984-12-21	1989-04-21	1989-04-21	1984-12-21	1984-12-21	1989-04-21
+1987-05-28	1985-03-28	1989-07-28	1989-07-28	1985-03-28	1985-03-28	1989-07-28
+1998-10-16	1996-08-16	2000-12-16	2000-12-16	1996-08-16	1996-08-16	2000-12-16
+1999-10-03	1997-08-03	2001-12-03	2001-12-03	1997-08-03	1997-08-03	2001-12-03
+2000-12-18	1998-10-18	2003-02-18	2003-02-18	1998-10-18	1998-10-18	2003-02-18
+2002-05-10	2000-03-10	2004-07-10	2004-07-10	2000-03-10	2000-03-10	2004-07-10
+2003-09-23	2001-07-23	2005-11-23	2005-11-23	2001-07-23	2001-07-23	2005-11-23
+2004-03-07	2002-01-07	2006-05-07	2006-05-07	2002-01-07	2002-01-07	2006-05-07
+2007-02-09	2004-12-09	2009-04-09	2009-04-09	2004-12-09	2004-12-09	2009-04-09
+2009-01-21	2006-11-21	2011-03-21	2011-03-21	2006-11-21	2006-11-21	2011-03-21
+2010-04-08	2008-02-08	2012-06-08	2012-06-08	2008-02-08	2008-02-08	2012-06-08
+2013-04-07	2011-02-07	2015-06-07	2015-06-07	2011-02-07	2011-02-07	2015-06-07
+2013-04-10	2011-02-10	2015-06-10	2015-06-10	2011-02-10	2011-02-10	2015-06-10
+2021-09-24	2019-07-24	2023-11-24	2023-11-24	2019-07-24	2019-07-24	2023-11-24
+2024-11-11	2022-09-11	2027-01-11	2027-01-11	2022-09-11	2022-09-11	2027-01-11
+4143-07-08	4141-05-08	4145-09-08	4145-09-08	4141-05-08	4141-05-08	4145-09-08
+4966-12-04	4964-10-04	4969-02-04	4969-02-04	4964-10-04	4964-10-04	4969-02-04
+5339-02-01	5336-12-01	5341-04-01	5341-04-01	5336-12-01	5336-12-01	5341-04-01
+5344-10-04	5342-08-04	5346-12-04	5346-12-04	5342-08-04	5342-08-04	5346-12-04
+5397-07-13	5395-05-13	5399-09-13	5399-09-13	5395-05-13	5395-05-13	5399-09-13
+5966-07-09	5964-05-09	5968-09-09	5968-09-09	5964-05-09	5964-05-09	5968-09-09
+6229-06-28	6227-04-28	6231-08-28	6231-08-28	6227-04-28	6227-04-28	6231-08-28
+6482-04-27	6480-02-27	6484-06-27	6484-06-27	6480-02-27	6480-02-27	6484-06-27
+6631-11-13	6629-09-13	6634-01-13	6634-01-13	6629-09-13	6629-09-13	6634-01-13
+6705-09-28	6703-07-28	6707-11-28	6707-11-28	6703-07-28	6703-07-28	6707-11-28
+6731-02-12	6728-12-12	6733-04-12	6733-04-12	6728-12-12	6728-12-12	6733-04-12
+7160-12-02	7158-10-02	7163-02-02	7163-02-02	7158-10-02	7158-10-02	7163-02-02
+7409-09-07	7407-07-07	7411-11-07	7411-11-07	7407-07-07	7407-07-07	7411-11-07
+7503-06-23	7501-04-23	7505-08-23	7505-08-23	7501-04-23	7501-04-23	7505-08-23
+8422-07-22	8420-05-22	8424-09-22	8424-09-22	8420-05-22	8420-05-22	8424-09-22
+8521-01-16	8518-11-16	8523-03-16	8523-03-16	8518-11-16	8518-11-16	8523-03-16
+9075-06-13	9073-04-13	9077-08-13	9077-08-13	9073-04-13	9073-04-13	9077-08-13
+9209-11-11	9207-09-11	9212-01-11	9212-01-11	9207-09-11	9207-09-11	9212-01-11
+9403-01-09	9400-11-09	9405-03-09	9405-03-09	9400-11-09	9400-11-09	9405-03-09
+PREHOOK: query: explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: dateval (type: date), (dateval - 1999-06-07) (type: interval_day_time), (1999-06-07 - dateval) (type: interval_day_time), (dateval - dateval) (type: interval_day_time)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: date)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3
+0004-09-22	-728552 23:00:00.000000000	728552 23:00:00.000000000	0 00:00:00.000000000
+0528-10-27	-537126 23:00:00.000000000	537126 23:00:00.000000000	0 00:00:00.000000000
+1319-02-02	-248481 23:00:00.000000000	248481 23:00:00.000000000	0 00:00:00.000000000
+1404-07-23	-217263 23:00:00.000000000	217263 23:00:00.000000000	0 00:00:00.000000000
+1815-05-06	-67236 23:00:00.000000000	67236 23:00:00.000000000	0 00:00:00.000000000
+1883-04-17	-42418 23:00:00.000000000	42418 23:00:00.000000000	0 00:00:00.000000000
+1966-08-16	-11983 00:00:00.000000000	11983 00:00:00.000000000	0 00:00:00.000000000
+1973-04-17	-9546 23:00:00.000000000	9546 23:00:00.000000000	0 00:00:00.000000000
+1974-10-04	-9012 00:00:00.000000000	9012 00:00:00.000000000	0 00:00:00.000000000
+1976-03-03	-8495 23:00:00.000000000	8495 23:00:00.000000000	0 00:00:00.000000000
+1976-05-06	-8432 00:00:00.000000000	8432 00:00:00.000000000	0 00:00:00.000000000
+1978-08-05	-7611 00:00:00.000000000	7611 00:00:00.000000000	0 00:00:00.000000000
+1981-04-25	-6616 23:00:00.000000000	6616 23:00:00.000000000	0 00:00:00.000000000
+1981-11-15	-6412 23:00:00.000000000	6412 23:00:00.000000000	0 00:00:00.000000000
+1985-07-20	-5070 00:00:00.000000000	5070 00:00:00.000000000	0 00:00:00.000000000
+1985-11-18	-4948 23:00:00.000000000	4948 23:00:00.000000000	0 00:00:00.000000000
+1987-02-21	-4488 23:00:00.000000000	4488 23:00:00.000000000	0 00:00:00.000000000
+1987-05-28	-4393 00:00:00.000000000	4393 00:00:00.000000000	0 00:00:00.000000000
+1998-10-16	-234 00:00:00.000000000	234 00:00:00.000000000	0 00:00:00.000000000
+1999-10-03	118 00:00:00.000000000	-118 00:00:00.000000000	0 00:00:00.000000000
+2000-12-18	560 01:00:00.000000000	-560 01:00:00.000000000	0 00:00:00.000000000
+2002-05-10	1068 00:00:00.000000000	-1068 00:00:00.000000000	0 00:00:00.000000000
+2003-09-23	1569 00:00:00.000000000	-1569 00:00:00.000000000	0 00:00:00.000000000
+2004-03-07	1735 01:00:00.000000000	-1735 01:00:00.000000000	0 00:00:00.000000000
+2007-02-09	2804 01:00:00.000000000	-2804 01:00:00.000000000	0 00:00:00.000000000
+2009-01-21	3516 01:00:00.000000000	-3516 01:00:00.000000000	0 00:00:00.000000000
+2010-04-08	3958 00:00:00.000000000	-3958 00:00:00.000000000	0 00:00:00.000000000
+2013-04-07	5053 00:00:00.000000000	-5053 00:00:00.000000000	0 00:00:00.000000000
+2013-04-10	5056 00:00:00.000000000	-5056 00:00:00.000000000	0 00:00:00.000000000
+2021-09-24	8145 00:00:00.000000000	-8145 00:00:00.000000000	0 00:00:00.000000000
+2024-11-11	9289 01:00:00.000000000	-9289 01:00:00.000000000	0 00:00:00.000000000
+4143-07-08	783111 00:00:00.000000000	-783111 00:00:00.000000000	0 00:00:00.000000000
+4966-12-04	1083855 01:00:00.000000000	-1083855 01:00:00.000000000	0 00:00:00.000000000
+5339-02-01	1219784 01:00:00.000000000	-1219784 01:00:00.000000000	0 00:00:00.000000000
+5344-10-04	1221856 00:00:00.000000000	-1221856 00:00:00.000000000	0 00:00:00.000000000
+5397-07-13	1241131 00:00:00.000000000	-1241131 00:00:00.000000000	0 00:00:00.000000000
+5966-07-09	1448949 00:00:00.000000000	-1448949 00:00:00.000000000	0 00:00:00.000000000
+6229-06-28	1544997 00:00:00.000000000	-1544997 00:00:00.000000000	0 00:00:00.000000000
+6482-04-27	1637342 00:00:00.000000000	-1637342 00:00:00.000000000	0 00:00:00.000000000
+6631-11-13	1691962 01:00:00.000000000	-1691962 01:00:00.000000000	0 00:00:00.000000000
+6705-09-28	1718944 00:00:00.000000000	-1718944 00:00:00.000000000	0 00:00:00.000000000
+6731-02-12	1728212 01:00:00.000000000	-1728212 01:00:00.000000000	0 00:00:00.000000000
+7160-12-02	1885195 01:00:00.000000000	-1885195 01:00:00.000000000	0 00:00:00.000000000
+7409-09-07	1976054 00:00:00.000000000	-1976054 00:00:00.000000000	0 00:00:00.000000000
+7503-06-23	2010310 00:00:00.000000000	-2010310 00:00:00.000000000	0 00:00:00.000000000
+8422-07-22	2345998 00:00:00.000000000	-2345998 00:00:00.000000000	0 00:00:00.000000000
+8521-01-16	2381970 01:00:00.000000000	-2381970 01:00:00.000000000	0 00:00:00.000000000
+9075-06-13	2584462 00:00:00.000000000	-2584462 00:00:00.000000000	0 00:00:00.000000000
+9209-11-11	2633556 01:00:00.000000000	-2633556 01:00:00.000000000	0 00:00:00.000000000
+9403-01-09	2704106 01:00:00.000000000	-2704106 01:00:00.000000000	0 00:00:00.000000000
+PREHOOK: query: explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: tsval (type: timestamp), (tsval - 2-2) (type: timestamp), (tsval - -2-2) (type: timestamp), (tsval + 2-2) (type: timestamp), (tsval + -2-2) (type: timestamp), (-2-2 + tsval) (type: timestamp), (2-2 + tsval) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: timestamp)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+tsval	c1	c2	c3	c4	c5	c6
+0004-09-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222
+0528-10-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273
+1319-02-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778
+1404-07-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026
+1815-05-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705
+1883-04-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229
+1966-08-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031
+1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156	1975-06-17 07:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156
+1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989	1976-12-04 16:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989
+1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162	1978-05-03 05:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162
+1976-05-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948
+1978-08-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501	1980-10-05 14:41:05.501	1976-06-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501
+1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689	1983-06-25 10:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689
+1981-11-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387
+1985-07-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11	1987-09-20 09:30:11	1983-05-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11
+1985-11-18 16:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 17:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54
+1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29	1989-04-21 20:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29
+1987-05-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987	2000-12-16 19:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987
+1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939	2001-12-03 15:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939
+2000-12-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252	2005-11-23 21:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252
+2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13	2006-05-07 21:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13
+2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876	2009-04-09 06:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876
+2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108	2011-03-21 11:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108
+2010-04-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165	2023-11-24 02:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165
+2024-11-11 16:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 17:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101
+4143-07-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259
+4966-12-04 09:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 10:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691	5341-04-01 15:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691
+5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165	5346-12-04 17:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165
+5397-07-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438
+5966-07-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597	5968-09-09 03:30:50.597	5964-05-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597
+6229-06-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179
+6482-04-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672	6707-11-28 17:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672
+6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702	6733-04-12 09:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702
+7160-12-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602	7411-11-07 22:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602
+7503-06-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486	7505-08-23 23:14:17.486	7501-04-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486
+8422-07-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084
+8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388	8523-03-16 21:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388
+9075-06-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797
+9209-11-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453
+9403-01-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547	9405-03-09 18:12:33.547	9400-11-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547
+PREHOOK: query: explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: 5-5 (type: interval_year_month)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+          Limit
+            Number of rows: 2
+            Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+c0	c1
+5-5	-1-1
+5-5	-1-1
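
The two result rows above follow directly from interval_year_month being a plain
month count: '2-2' is 26 months and '3-3' is 39, so the sum is 65 months (printed
5-5) and the difference is -13 (printed -1-1). A minimal standalone sketch of that
arithmetic and formatting (illustrative names, not Hive's own classes):

    public class YearMonthIntervalSketch {
        static int toMonths(int years, int months) {
            return years * 12 + months;              // '2-2' -> 26, '3-3' -> 39
        }
        static String format(int totalMonths) {
            String sign = totalMonths < 0 ? "-" : "";
            int m = Math.abs(totalMonths);
            return sign + (m / 12) + "-" + (m % 12); // 65 -> "5-5", -13 -> "-1-1"
        }
        public static void main(String[] args) {
            System.out.println(format(toMonths(2, 2) + toMonths(3, 3))); // 5-5
            System.out.println(format(toMonths(2, 2) - toMonths(3, 3))); // -1-1
        }
    }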
+PREHOOK: query: -- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: -- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: dateval (type: date), (dateval - 99 11:22:33.123456789) (type: timestamp), (dateval - -99 11:22:33.123456789) (type: timestamp), (dateval + 99 11:22:33.123456789) (type: timestamp), (dateval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + dateval) (type: timestamp), (99 11:22:33.123456789 + dateval) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: date)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3	c4	c5	c6
+0004-09-22	0004-06-14 12:37:26.876543211	0004-12-30 11:22:33.123456789	0004-12-30 11:22:33.123456789	0004-06-14 12:37:26.876543211	0004-06-14 12:37:26.876543211	0004-12-30 11:22:33.123456789
+0528-10-27	0528-07-19 12:37:26.876543211	0529-02-03 11:22:33.123456789	0529-02-03 11:22:33.123456789	0528-07-19 12:37:26.876543211	0528-07-19 12:37:26.876543211	0529-02-03 11:22:33.123456789
+1319-02-02	1318-10-25 12:37:26.876543211	1319-05-12 11:22:33.123456789	1319-05-12 11:22:33.123456789	1318-10-25 12:37:26.876543211	1318-10-25 12:37:26.876543211	1319-05-12 11:22:33.123456789
+1404-07-23	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789	1404-10-30 11:22:33.123456789	1404-04-14 12:37:26.876543211	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789
+1815-05-06	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789	1815-08-13 11:22:33.123456789	1815-01-26 12:37:26.876543211	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789
+1883-04-17	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789	1883-07-25 11:22:33.123456789	1883-01-07 12:37:26.876543211	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789
+1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789	1966-11-23 10:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789
+1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789	1973-07-25 12:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789
+1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789	1975-01-11 10:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789
+1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789	1976-06-10 12:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789
+1976-05-06	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 11:37:26.876543211	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789
+1978-08-05	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789	1978-11-12 10:22:33.123456789	1978-04-27 11:37:26.876543211	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789
+1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789	1981-08-02 12:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789
+1981-11-15	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 13:37:26.876543211	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789
+1985-07-20	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789	1985-10-27 10:22:33.123456789	1985-04-11 11:37:26.876543211	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789
+1985-11-18	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 13:37:26.876543211	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789
+1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789	1987-05-31 12:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789
+1987-05-28	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 11:37:26.876543211	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789
+1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789	1999-01-23 10:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789
+1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789	2000-01-10 10:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789
+2000-12-18	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 13:37:26.876543211	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789
+2002-05-10	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 11:37:26.876543211	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789
+2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789	2003-12-31 10:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789
+2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789	2004-06-14 12:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789
+2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789	2007-05-19 12:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789
+2009-01-21	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789	2009-04-30 12:22:33.123456789	2008-10-13 13:37:26.876543211	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789
+2010-04-08	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 11:37:26.876543211	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789
+2013-04-07	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 11:37:26.876543211	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789
+2013-04-10	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 11:37:26.876543211	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789
+2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789	2022-01-01 10:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789
+2024-11-11	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 13:37:26.876543211	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789
+4143-07-08	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789	4143-10-15 11:22:33.123456789	4143-03-30 12:37:26.876543211	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789
+4966-12-04	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789	4967-03-13 12:22:33.123456789	4966-08-26 13:37:26.876543211	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789
+5339-02-01	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789	5339-05-11 12:22:33.123456789	5338-10-24 13:37:26.876543211	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789
+5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789	5345-01-11 10:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789
+5397-07-13	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789	5397-10-20 11:22:33.123456789	5397-04-04 12:37:26.876543211	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789
+5966-07-09	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789	5966-10-16 11:22:33.123456789	5966-03-31 12:37:26.876543211	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789
+6229-06-28	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789	6229-10-05 11:22:33.123456789	6229-03-20 12:37:26.876543211	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789
+6482-04-27	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 11:37:26.876543211	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789
+6631-11-13	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 13:37:26.876543211	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789
+6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789	6706-01-05 10:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789
+6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789	6731-05-22 12:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789
+7160-12-02	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 13:37:26.876543211	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789
+7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789	7409-12-15 10:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789
+7503-06-23	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789	7503-09-30 11:22:33.123456789	7503-03-15 12:37:26.876543211	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789
+8422-07-22	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789	8422-10-29 11:22:33.123456789	8422-04-13 12:37:26.876543211	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789
+8521-01-16	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789	8521-04-25 12:22:33.123456789	8520-10-08 13:37:26.876543211	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789
+9075-06-13	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 11:37:26.876543211	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789
+9209-11-11	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 13:37:26.876543211	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789
+9403-01-09	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789	9403-04-18 12:22:33.123456789	9402-10-01 13:37:26.876543211	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789
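
Each row above is midnight of dateval shifted by 99 days 11:22:33.123456789 in
either direction: for the first row, stepping back 99 whole days from 0004-09-22
gives 0004-06-15, and subtracting the remaining 11:22:33.123456789 crosses into
0004-06-14 12:37:26.876543211 (note 1 - .123456789 = .876543211). The one-hour
spread in the hour column (11:37 vs 12:37 vs 13:37) appears to come from the
results being rendered in the daylight-saving-observing local zone the golden
file was generated under. A nanosecond-exact java.time sketch of the zone-free
part:

    import java.time.Duration;
    import java.time.LocalDate;

    // Sketch only: reproduces column c1 of the first row, ignoring time zones.
    public class DayTimeIntervalSketch {
        public static void main(String[] args) {
            Duration iv = Duration.ofDays(99).plusHours(11).plusMinutes(22)
                                  .plusSeconds(33).plusNanos(123_456_789L);
            System.out.println(LocalDate.parse("0004-09-22").atStartOfDay().minus(iv));
            // -> 0004-06-14T12:37:26.876543211
        }
    }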
+PREHOOK: query: explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: dateval (type: date), tsval (type: timestamp), (dateval - tsval) (type: interval_day_time), (tsval - dateval) (type: interval_day_time), (tsval - tsval) (type: interval_day_time)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: date)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: timestamp), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: timestamp), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	tsval	c2	c3	c4
+0004-09-22	0004-09-22 18:26:29.519542222	-0 18:26:30.519542222	0 18:26:30.519542222	0 00:00:00.000000000
+0528-10-27	0528-10-27 08:15:18.941718273	-0 08:15:19.941718273	0 08:15:19.941718273	0 00:00:00.000000000
+1319-02-02	1319-02-02 16:31:57.778	-0 16:31:58.778000000	0 16:31:58.778000000	0 00:00:00.000000000
+1404-07-23	1404-07-23 15:32:16.059185026	-0 15:32:17.059185026	0 15:32:17.059185026	0 00:00:00.000000000
+1815-05-06	1815-05-06 00:12:37.543584705	-0 00:12:38.543584705	0 00:12:38.543584705	0 00:00:00.000000000
+1883-04-17	1883-04-17 04:14:34.647766229	-0 04:14:35.647766229	0 04:14:35.647766229	0 00:00:00.000000000
+1966-08-16	1966-08-16 13:36:50.183618031	-0 13:36:51.183618031	0 13:36:51.183618031	0 00:00:00.000000000
+1973-04-17	1973-04-17 06:30:38.596784156	-0 06:30:38.596784156	0 06:30:38.596784156	0 00:00:00.000000000
+1974-10-04	1974-10-04 17:21:03.989	-0 17:21:03.989000000	0 17:21:03.989000000	0 00:00:00.000000000
+1976-03-03	1976-03-03 04:54:33.000895162	-0 04:54:33.000895162	0 04:54:33.000895162	0 00:00:00.000000000
+1976-05-06	1976-05-06 00:42:30.910786948	-0 00:42:30.910786948	0 00:42:30.910786948	0 00:00:00.000000000
+1978-08-05	1978-08-05 14:41:05.501	-0 14:41:05.501000000	0 14:41:05.501000000	0 00:00:00.000000000
+1981-04-25	1981-04-25 09:01:12.077192689	-0 09:01:12.077192689	0 09:01:12.077192689	0 00:00:00.000000000
+1981-11-15	1981-11-15 23:03:10.999338387	-0 23:03:10.999338387	0 23:03:10.999338387	0 00:00:00.000000000
+1985-07-20	1985-07-20 09:30:11	-0 09:30:11.000000000	0 09:30:11.000000000	0 00:00:00.000000000
+1985-11-18	1985-11-18 16:37:54	-0 16:37:54.000000000	0 16:37:54.000000000	0 00:00:00.000000000
+1987-02-21	1987-02-21 19:48:29	-0 19:48:29.000000000	0 19:48:29.000000000	0 00:00:00.000000000
+1987-05-28	1987-05-28 13:52:07.900916635	-0 13:52:07.900916635	0 13:52:07.900916635	0 00:00:00.000000000
+1998-10-16	1998-10-16 20:05:29.397591987	-0 20:05:29.397591987	0 20:05:29.397591987	0 00:00:00.000000000
+1999-10-03	1999-10-03 16:59:10.396903939	-0 16:59:10.396903939	0 16:59:10.396903939	0 00:00:00.000000000
+2000-12-18	2000-12-18 08:42:30.000595596	-0 08:42:30.000595596	0 08:42:30.000595596	0 00:00:00.000000000
+2002-05-10	2002-05-10 05:29:48.990818073	-0 05:29:48.990818073	0 05:29:48.990818073	0 00:00:00.000000000
+2003-09-23	2003-09-23 22:33:17.00003252	-0 22:33:17.000032520	0 22:33:17.000032520	0 00:00:00.000000000
+2004-03-07	2004-03-07 20:14:13	-0 20:14:13.000000000	0 20:14:13.000000000	0 00:00:00.000000000
+2007-02-09	2007-02-09 05:17:29.368756876	-0 05:17:29.368756876	0 05:17:29.368756876	0 00:00:00.000000000
+2009-01-21	2009-01-21 10:49:07.108	-0 10:49:07.108000000	0 10:49:07.108000000	0 00:00:00.000000000
+2010-04-08	2010-04-08 02:43:35.861742727	-0 02:43:35.861742727	0 02:43:35.861742727	0 00:00:00.000000000
+2013-04-07	2013-04-07 02:44:43.00086821	-0 02:44:43.000868210	0 02:44:43.000868210	0 00:00:00.000000000
+2013-04-10	2013-04-10 00:43:46.854731546	-0 00:43:46.854731546	0 00:43:46.854731546	0 00:00:00.000000000
+2021-09-24	2021-09-24 03:18:32.413655165	-0 03:18:32.413655165	0 03:18:32.413655165	0 00:00:00.000000000
+2024-11-11	2024-11-11 16:42:41.101	-0 16:42:41.101000000	0 16:42:41.101000000	0 00:00:00.000000000
+4143-07-08	4143-07-08 10:53:27.252802259	-0 10:53:27.252802259	0 10:53:27.252802259	0 00:00:00.000000000
+4966-12-04	4966-12-04 09:30:55.202	-0 09:30:55.202000000	0 09:30:55.202000000	0 00:00:00.000000000
+5339-02-01	5339-02-01 14:10:01.085678691	-0 14:10:01.085678691	0 14:10:01.085678691	0 00:00:00.000000000
+5344-10-04	5344-10-04 18:40:08.165	-0 18:40:08.165000000	0 18:40:08.165000000	0 00:00:00.000000000
+5397-07-13	5397-07-13 07:12:32.000896438	-0 07:12:32.000896438	0 07:12:32.000896438	0 00:00:00.000000000
+5966-07-09	5966-07-09 03:30:50.597	-0 03:30:50.597000000	0 03:30:50.597000000	0 00:00:00.000000000
+6229-06-28	6229-06-28 02:54:28.970117179	-0 02:54:28.970117179	0 02:54:28.970117179	0 00:00:00.000000000
+6482-04-27	6482-04-27 12:07:38.073915413	-0 12:07:38.073915413	0 12:07:38.073915413	0 00:00:00.000000000
+6631-11-13	6631-11-13 16:31:29.702202248	-0 16:31:29.702202248	0 16:31:29.702202248	0 00:00:00.000000000
+6705-09-28	6705-09-28 18:27:28.000845672	-0 18:27:28.000845672	0 18:27:28.000845672	0 00:00:00.000000000
+6731-02-12	6731-02-12 08:12:48.287783702	-0 08:12:48.287783702	0 08:12:48.287783702	0 00:00:00.000000000
+7160-12-02	7160-12-02 06:00:24.81200852	-0 06:00:24.812008520	0 06:00:24.812008520	0 00:00:00.000000000
+7409-09-07	7409-09-07 23:33:32.459349602	-0 23:33:32.459349602	0 23:33:32.459349602	0 00:00:00.000000000
+7503-06-23	7503-06-23 23:14:17.486	-0 23:14:17.486000000	0 23:14:17.486000000	0 00:00:00.000000000
+8422-07-22	8422-07-22 03:21:45.745036084	-0 03:21:45.745036084	0 03:21:45.745036084	0 00:00:00.000000000
+8521-01-16	8521-01-16 20:42:05.668832388	-0 20:42:05.668832388	0 20:42:05.668832388	0 00:00:00.000000000
+9075-06-13	9075-06-13 16:20:09.218517797	-0 16:20:09.218517797	0 16:20:09.218517797	0 00:00:00.000000000
+9209-11-11	9209-11-11 04:08:58.223768453	-0 04:08:58.223768453	0 04:08:58.223768453	0 00:00:00.000000000
+9403-01-09	9403-01-09 18:12:33.547	-0 18:12:33.547000000	0 18:12:33.547000000	0 00:00:00.000000000
+PREHOOK: query: explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: tsval (type: timestamp), (tsval - 99 11:22:33.123456789) (type: timestamp), (tsval - -99 11:22:33.123456789) (type: timestamp), (tsval + 99 11:22:33.123456789) (type: timestamp), (tsval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + tsval) (type: timestamp), (99 11:22:33.123456789 + tsval) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: timestamp)
+                sort order: +
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+          Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+tsval	c1	c2	c3	c4	c5	c6
+0004-09-22 18:26:29.519542222	0004-06-15 07:03:56.396085433	0004-12-31 05:49:02.642999011	0004-12-31 05:49:02.642999011	0004-06-15 07:03:56.396085433	0004-06-15 07:03:56.396085433	0004-12-31 05:49:02.642999011
+0528-10-27 08:15:18.941718273	0528-07-19 20:52:45.818261484	0529-02-03 19:37:52.065175062	0529-02-03 19:37:52.065175062	0528-07-19 20:52:45.818261484	0528-07-19 20:52:45.818261484	0529-02-03 19:37:52.065175062
+1319-02-02 16:31:57.778	1318-10-26 05:09:24.654543211	1319-05-13 03:54:30.901456789	1319-05-13 03:54:30.901456789	1318-10-26 05:09:24.654543211	1318-10-26 05:09:24.654543211	1319-05-13 03:54:30.901456789
+1404-07-23 15:32:16.059185026	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815	1404-10-31 02:54:49.182641815	1404-04-15 04:09:42.935728237	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815
+1815-05-06 00:12:37.543584705	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494	1815-08-13 11:35:10.667041494	1815-01-26 12:50:04.420127916	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494
+1883-04-17 04:14:34.647766229	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018	1883-07-25 15:37:07.771223018	1883-01-07 16:52:01.52430944	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018
+1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482	1966-11-23 23:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482
+1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945	1973-07-25 18:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945
+1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789	1975-01-12 03:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789
+1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951	1976-06-10 17:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951
+1976-05-06 00:42:30.910786948	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 12:19:57.787330159	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789	1978-11-13 01:03:38.624456789	1978-04-28 02:18:32.377543211	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789
+1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478	1981-08-02 21:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478
+1981-11-15 23:03:10.999338387	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 12:40:37.875881598	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789	1985-10-27 19:52:44.123456789	1985-04-11 21:07:37.876543211	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789
+1985-11-18 16:37:54	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 06:15:20.876543211	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789	1987-06-01 08:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789
+1987-05-28 13:52:07.900916635	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 01:29:34.777459846	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776	1999-01-24 06:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776
+1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728	2000-01-11 03:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728
+2000-12-18 08:42:30.000595596	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 22:19:56.877138807	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 17:07:15.867361284	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309	2004-01-01 08:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309
+2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789	2004-06-15 08:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789
+2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665	2007-05-19 17:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665
+2009-01-21 10:49:07.108	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789	2009-04-30 23:11:40.231456789	2008-10-14 00:26:33.984543211	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789
+2010-04-08 02:43:35.861742727	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 14:21:02.738285938	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 14:22:09.877411421	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 12:21:13.731274757	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954	2022-01-01 13:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954
+2024-11-11 16:42:41.101	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 06:20:07.977543211	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789
+4143-07-08 10:53:27.252802259	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048	4143-10-15 22:16:00.376259048	4143-03-30 23:30:54.12934547	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048
+4966-12-04 09:30:55.202	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789	4967-03-13 21:53:28.325456789	4966-08-26 23:08:22.078543211	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789
+5339-02-01 14:10:01.085678691	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548	5339-05-12 02:32:34.20913548	5338-10-25 03:47:27.962221902	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548
+5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789	5345-01-12 05:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789
+5397-07-13 07:12:32.000896438	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227	5397-10-20 18:35:05.124353227	5397-04-04 19:49:58.877439649	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227
+5966-07-09 03:30:50.597	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789	5966-10-16 14:53:23.720456789	5966-03-31 16:08:17.473543211	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789
+6229-06-28 02:54:28.970117179	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968	6229-10-05 14:17:02.093573968	6229-03-20 15:31:55.84666039	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968
+6482-04-27 12:07:38.073915413	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-17 23:45:04.950458624	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 06:08:56.578745459	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461	6706-01-06 04:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461
+6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491	6731-05-22 20:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491
+7160-12-02 06:00:24.81200852	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 19:37:51.688551731	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391	7409-12-16 09:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391
+7503-06-23 23:14:17.486	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789	7503-10-01 10:36:50.609456789	7503-03-16 11:51:44.362543211	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789
+8422-07-22 03:21:45.745036084	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873	8422-10-29 14:44:18.868492873	8422-04-13 15:59:12.621579295	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873
+8521-01-16 20:42:05.668832388	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177	8521-04-26 09:04:38.792289177	8520-10-09 10:19:32.545375599	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177
+9075-06-13 16:20:09.218517797	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 03:57:36.095061008	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 17:46:25.100311664	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789	9403-04-19 06:35:06.670456789	9402-10-02 07:50:00.423543211	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789
+PREHOOK: query: explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: interval_arithmetic_1
+            Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              Limit
+                Number of rows: 2
+                Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+c0	c1
+109 20:30:40.246913578	89 02:14:26.000000000
+109 20:30:40.246913578	89 02:14:26.000000000
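
Those constant rows decompose component-wise, with no carries:

    days:   99 + 10 = 109                      99 - 10 = 89
    time:   11:22:33 + 09:08:07 = 20:30:40     11:22:33 - 09:08:07 = 02:14:26
    nanos:  .123456789 + .123456789 = .246913578, and on subtraction the
            fractional parts cancel exactly to .000000000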
+PREHOOK: query: drop table interval_arithmetic_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@interval_arithmetic_1
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: drop table interval_arithmetic_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@interval_arithmetic_1
+POSTHOOK: Output: default@interval_arithmetic_1


[04/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index 3b35d07..37eea4a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -175,13 +176,8 @@ public class TestVectorizedORCReader {
           } else if (a instanceof TimestampWritable) {
             // Timestamps are stored as long, so convert and compare
             TimestampWritable t = ((TimestampWritable) a);
-            // Timestamp.getTime() is overriden and is 
-            // long time = super.getTime();
-            // return (time + (nanos / 1000000));
-            Long timeInNanoSec = (t.getTimestamp().getTime() * 1000000)
-                + (t.getTimestamp().getNanos() % 1000000);
-            long b = ((LongColumnVector) cv).vector[rowId];
-            Assert.assertEquals(timeInNanoSec.toString(), Long.toString(b));
+            TimestampColumnVector tcv = ((TimestampColumnVector) cv);
+            Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
 
           } else if (a instanceof DateWritable) {
             // Dates are stored as long, so convert and compare
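
The rewritten assertion compares java.sql.Timestamp values directly through
TimestampColumnVector.asScratchTimestamp instead of reconstructing epoch
nanoseconds from the writable. The reconstruction the removed lines relied on,
as a standalone sketch (illustrative names):

    import java.sql.Timestamp;

    public class EpochNanosSketch {
        // Timestamp.getTime() already folds getNanos() / 1_000_000 in as whole
        // milliseconds, so only the sub-millisecond remainder is added back.
        static long toEpochNanos(Timestamp t) {
            return t.getTime() * 1_000_000L + t.getNanos() % 1_000_000L;
        }
        public static void main(String[] args) {
            Timestamp t = Timestamp.valueOf("2013-04-10 00:43:46.854731546");
            System.out.println(toEpochNanos(t) % 1_000_000_000L); // 854731546
        }
    }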

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/queries/clientpositive/vector_interval_arithmetic.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vector_interval_arithmetic.q b/ql/src/test/queries/clientpositive/vector_interval_arithmetic.q
new file mode 100644
index 0000000..40c4c03
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_interval_arithmetic.q
@@ -0,0 +1,174 @@
+set hive.cli.print.header=true;
+set hive.explain.user=false;
+set hive.fetch.task.conversion=none;
+
+create table unique_timestamps (tsval timestamp) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/timestamps.txt' OVERWRITE INTO TABLE unique_timestamps;
+
+create table interval_arithmetic_1 (dateval date, tsval timestamp) stored as orc;
+insert overwrite table interval_arithmetic_1
+  select cast(tsval as date), tsval from unique_timestamps;
+
+SET hive.vectorized.execution.enabled=true;
+
+-- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval;
+
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval;
+
+explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval;
+
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval;
+
+explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval;
+
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval;
+
+explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2;
+
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2;
+
+
+-- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval;
+
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval;
+
+explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval;
+
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval;
+
+explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval;
+
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval;
+
+explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2;
+
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2;
+
+drop table interval_arithmetic_1;
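
(To run the new qfile locally, the usual qtest invocation should apply -- from
itests/qtest, assuming a standard test setup:

    mvn test -Dtest=TestCliDriver -Dqfile=vector_interval_arithmetic.q

with TestMiniTezCliDriver for the Tez variant whose golden file follows below.)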

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/queries/clientpositive/vectorized_timestamp.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorized_timestamp.q b/ql/src/test/queries/clientpositive/vectorized_timestamp.q
new file mode 100644
index 0000000..baf0cfa
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vectorized_timestamp.q
@@ -0,0 +1,27 @@
+set hive.fetch.task.conversion=none;
+
+DROP TABLE IF EXISTS test;
+CREATE TABLE test(ts TIMESTAMP) STORED AS ORC;
+INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999');
+
+SET hive.vectorized.execution.enabled = false;
+EXPLAIN
+SELECT ts FROM test;
+
+SELECT ts FROM test;
+
+EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test;
+
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test;
+
+SET hive.vectorized.execution.enabled = true;
+EXPLAIN
+SELECT ts FROM test;
+
+SELECT ts FROM test;
+
+EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test;
+
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test;
\ No newline at end of file
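
The endpoints this test pins down (0001-01-01 00:00:00.000000000 and
9999-12-31 23:59:59.999999999) are precisely what a single long of epoch
nanoseconds cannot represent, which is the corruption HIVE-9862 addresses;
TimestampColumnVector (imported in the test diff above) avoids it by keeping
milliseconds and nanoseconds separately. A quick order-of-magnitude check:

    public class TimestampRangeSketch {
        public static void main(String[] args) {
            long nanosPerYear = 365L * 24 * 3600 * 1_000_000_000L; // ~3.15e16
            System.out.println(Long.MAX_VALUE / nanosPerYear);     // ~292 years
            // 9999-12-31 is roughly 8,030 years past the 1970 epoch, i.e.
            // ~2.5e20 ns, far beyond Long.MAX_VALUE (~9.22e18).
        }
    }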

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out
new file mode 100644
index 0000000..0ea8b8b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/vector_interval_arithmetic.q.out
@@ -0,0 +1,1078 @@
+PREHOOK: query: create table unique_timestamps (tsval timestamp) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@unique_timestamps
+POSTHOOK: query: create table unique_timestamps (tsval timestamp) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@unique_timestamps
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/timestamps.txt' OVERWRITE INTO TABLE unique_timestamps
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@unique_timestamps
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/timestamps.txt' OVERWRITE INTO TABLE unique_timestamps
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@unique_timestamps
+PREHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@interval_arithmetic_1
+PREHOOK: query: insert overwrite table interval_arithmetic_1
+  select cast(tsval as date), tsval from unique_timestamps
+PREHOOK: type: QUERY
+PREHOOK: Input: default@unique_timestamps
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: insert overwrite table interval_arithmetic_1
+  select cast(tsval as date), tsval from unique_timestamps
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@unique_timestamps
+POSTHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: Lineage: interval_arithmetic_1.dateval EXPRESSION [(unique_timestamps)unique_timestamps.FieldSchema(name:tsval, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: interval_arithmetic_1.tsval SIMPLE [(unique_timestamps)unique_timestamps.FieldSchema(name:tsval, type:timestamp, comment:null), ]
+_c0	tsval
+PREHOOK: query: -- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: -- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: dateval (type: date), (dateval - 2-2) (type: date), (dateval - -2-2) (type: date), (dateval + 2-2) (type: date), (dateval + -2-2) (type: date), (-2-2 + dateval) (type: date), (2-2 + dateval) (type: date)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: date)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: date), VALUE._col2 (type: date), VALUE._col3 (type: date), VALUE._col4 (type: date), VALUE._col5 (type: date)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3	c4	c5	c6
+0004-09-22	0002-07-22	0006-11-22	0006-11-22	0002-07-22	0002-07-22	0006-11-22
+0528-10-27	0526-08-27	0530-12-27	0530-12-27	0526-08-27	0526-08-27	0530-12-27
+1319-02-02	1316-12-02	1321-04-02	1321-04-02	1316-12-02	1316-12-02	1321-04-02
+1404-07-23	1402-05-23	1406-09-23	1406-09-23	1402-05-23	1402-05-23	1406-09-23
+1815-05-06	1813-03-06	1817-07-06	1817-07-06	1813-03-06	1813-03-06	1817-07-06
+1883-04-17	1881-02-17	1885-06-17	1885-06-17	1881-02-17	1881-02-17	1885-06-17
+1966-08-16	1964-06-16	1968-10-16	1968-10-16	1964-06-16	1964-06-16	1968-10-16
+1973-04-17	1971-02-17	1975-06-17	1975-06-17	1971-02-17	1971-02-17	1975-06-17
+1974-10-04	1972-08-04	1976-12-04	1976-12-04	1972-08-04	1972-08-04	1976-12-04
+1976-03-03	1974-01-03	1978-05-03	1978-05-03	1974-01-03	1974-01-03	1978-05-03
+1976-05-06	1974-03-06	1978-07-06	1978-07-06	1974-03-06	1974-03-06	1978-07-06
+1978-08-05	1976-06-05	1980-10-05	1980-10-05	1976-06-05	1976-06-05	1980-10-05
+1981-04-25	1979-02-25	1983-06-25	1983-06-25	1979-02-25	1979-02-25	1983-06-25
+1981-11-15	1979-09-15	1984-01-15	1984-01-15	1979-09-15	1979-09-15	1984-01-15
+1985-07-20	1983-05-20	1987-09-20	1987-09-20	1983-05-20	1983-05-20	1987-09-20
+1985-11-18	1983-09-18	1988-01-18	1988-01-18	1983-09-18	1983-09-18	1988-01-18
+1987-02-21	1984-12-21	1989-04-21	1989-04-21	1984-12-21	1984-12-21	1989-04-21
+1987-05-28	1985-03-28	1989-07-28	1989-07-28	1985-03-28	1985-03-28	1989-07-28
+1998-10-16	1996-08-16	2000-12-16	2000-12-16	1996-08-16	1996-08-16	2000-12-16
+1999-10-03	1997-08-03	2001-12-03	2001-12-03	1997-08-03	1997-08-03	2001-12-03
+2000-12-18	1998-10-18	2003-02-18	2003-02-18	1998-10-18	1998-10-18	2003-02-18
+2002-05-10	2000-03-10	2004-07-10	2004-07-10	2000-03-10	2000-03-10	2004-07-10
+2003-09-23	2001-07-23	2005-11-23	2005-11-23	2001-07-23	2001-07-23	2005-11-23
+2004-03-07	2002-01-07	2006-05-07	2006-05-07	2002-01-07	2002-01-07	2006-05-07
+2007-02-09	2004-12-09	2009-04-09	2009-04-09	2004-12-09	2004-12-09	2009-04-09
+2009-01-21	2006-11-21	2011-03-21	2011-03-21	2006-11-21	2006-11-21	2011-03-21
+2010-04-08	2008-02-08	2012-06-08	2012-06-08	2008-02-08	2008-02-08	2012-06-08
+2013-04-07	2011-02-07	2015-06-07	2015-06-07	2011-02-07	2011-02-07	2015-06-07
+2013-04-10	2011-02-10	2015-06-10	2015-06-10	2011-02-10	2011-02-10	2015-06-10
+2021-09-24	2019-07-24	2023-11-24	2023-11-24	2019-07-24	2019-07-24	2023-11-24
+2024-11-11	2022-09-11	2027-01-11	2027-01-11	2022-09-11	2022-09-11	2027-01-11
+4143-07-08	4141-05-08	4145-09-08	4145-09-08	4141-05-08	4141-05-08	4145-09-08
+4966-12-04	4964-10-04	4969-02-04	4969-02-04	4964-10-04	4964-10-04	4969-02-04
+5339-02-01	5336-12-01	5341-04-01	5341-04-01	5336-12-01	5336-12-01	5341-04-01
+5344-10-04	5342-08-04	5346-12-04	5346-12-04	5342-08-04	5342-08-04	5346-12-04
+5397-07-13	5395-05-13	5399-09-13	5399-09-13	5395-05-13	5395-05-13	5399-09-13
+5966-07-09	5964-05-09	5968-09-09	5968-09-09	5964-05-09	5964-05-09	5968-09-09
+6229-06-28	6227-04-28	6231-08-28	6231-08-28	6227-04-28	6227-04-28	6231-08-28
+6482-04-27	6480-02-27	6484-06-27	6484-06-27	6480-02-27	6480-02-27	6484-06-27
+6631-11-13	6629-09-13	6634-01-13	6634-01-13	6629-09-13	6629-09-13	6634-01-13
+6705-09-28	6703-07-28	6707-11-28	6707-11-28	6703-07-28	6703-07-28	6707-11-28
+6731-02-12	6728-12-12	6733-04-12	6733-04-12	6728-12-12	6728-12-12	6733-04-12
+7160-12-02	7158-10-02	7163-02-02	7163-02-02	7158-10-02	7158-10-02	7163-02-02
+7409-09-07	7407-07-07	7411-11-07	7411-11-07	7407-07-07	7407-07-07	7411-11-07
+7503-06-23	7501-04-23	7505-08-23	7505-08-23	7501-04-23	7501-04-23	7505-08-23
+8422-07-22	8420-05-22	8424-09-22	8424-09-22	8420-05-22	8420-05-22	8424-09-22
+8521-01-16	8518-11-16	8523-03-16	8523-03-16	8518-11-16	8518-11-16	8523-03-16
+9075-06-13	9073-04-13	9077-08-13	9077-08-13	9073-04-13	9073-04-13	9077-08-13
+9209-11-11	9207-09-11	9212-01-11	9212-01-11	9207-09-11	9207-09-11	9212-01-11
+9403-01-09	9400-11-09	9405-03-09	9405-03-09	9400-11-09	9400-11-09	9405-03-09
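
Note on the block above: year-month interval arithmetic shifts a date by a whole number of months (interval '2-2' is 26 months), and negating the interval reverses the shift, which is why columns c1, c4 and c5 agree and c2, c3 and c6 agree. A minimal standalone sketch of the same semantics (illustrative only; the literal date is an assumption, not taken from the golden file):

    -- 2001-01-15 plus 26 months lands on 2003-03-15;
    -- subtracting interval '-2-2' is the same operation.
    select
      date '2001-01-15' + interval '2-2' year to month,   -- 2003-03-15
      date '2001-01-15' - interval '-2-2' year to month;  -- 2003-03-15
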
+PREHOOK: query: explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: dateval (type: date), (dateval - 1999-06-07) (type: interval_day_time), (1999-06-07 - dateval) (type: interval_day_time), (dateval - dateval) (type: interval_day_time)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: date)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3
+0004-09-22	-728552 23:00:00.000000000	728552 23:00:00.000000000	0 00:00:00.000000000
+0528-10-27	-537126 23:00:00.000000000	537126 23:00:00.000000000	0 00:00:00.000000000
+1319-02-02	-248481 23:00:00.000000000	248481 23:00:00.000000000	0 00:00:00.000000000
+1404-07-23	-217263 23:00:00.000000000	217263 23:00:00.000000000	0 00:00:00.000000000
+1815-05-06	-67236 23:00:00.000000000	67236 23:00:00.000000000	0 00:00:00.000000000
+1883-04-17	-42418 23:00:00.000000000	42418 23:00:00.000000000	0 00:00:00.000000000
+1966-08-16	-11983 00:00:00.000000000	11983 00:00:00.000000000	0 00:00:00.000000000
+1973-04-17	-9546 23:00:00.000000000	9546 23:00:00.000000000	0 00:00:00.000000000
+1974-10-04	-9012 00:00:00.000000000	9012 00:00:00.000000000	0 00:00:00.000000000
+1976-03-03	-8495 23:00:00.000000000	8495 23:00:00.000000000	0 00:00:00.000000000
+1976-05-06	-8432 00:00:00.000000000	8432 00:00:00.000000000	0 00:00:00.000000000
+1978-08-05	-7611 00:00:00.000000000	7611 00:00:00.000000000	0 00:00:00.000000000
+1981-04-25	-6616 23:00:00.000000000	6616 23:00:00.000000000	0 00:00:00.000000000
+1981-11-15	-6412 23:00:00.000000000	6412 23:00:00.000000000	0 00:00:00.000000000
+1985-07-20	-5070 00:00:00.000000000	5070 00:00:00.000000000	0 00:00:00.000000000
+1985-11-18	-4948 23:00:00.000000000	4948 23:00:00.000000000	0 00:00:00.000000000
+1987-02-21	-4488 23:00:00.000000000	4488 23:00:00.000000000	0 00:00:00.000000000
+1987-05-28	-4393 00:00:00.000000000	4393 00:00:00.000000000	0 00:00:00.000000000
+1998-10-16	-234 00:00:00.000000000	234 00:00:00.000000000	0 00:00:00.000000000
+1999-10-03	118 00:00:00.000000000	-118 00:00:00.000000000	0 00:00:00.000000000
+2000-12-18	560 01:00:00.000000000	-560 01:00:00.000000000	0 00:00:00.000000000
+2002-05-10	1068 00:00:00.000000000	-1068 00:00:00.000000000	0 00:00:00.000000000
+2003-09-23	1569 00:00:00.000000000	-1569 00:00:00.000000000	0 00:00:00.000000000
+2004-03-07	1735 01:00:00.000000000	-1735 01:00:00.000000000	0 00:00:00.000000000
+2007-02-09	2804 01:00:00.000000000	-2804 01:00:00.000000000	0 00:00:00.000000000
+2009-01-21	3516 01:00:00.000000000	-3516 01:00:00.000000000	0 00:00:00.000000000
+2010-04-08	3958 00:00:00.000000000	-3958 00:00:00.000000000	0 00:00:00.000000000
+2013-04-07	5053 00:00:00.000000000	-5053 00:00:00.000000000	0 00:00:00.000000000
+2013-04-10	5056 00:00:00.000000000	-5056 00:00:00.000000000	0 00:00:00.000000000
+2021-09-24	8145 00:00:00.000000000	-8145 00:00:00.000000000	0 00:00:00.000000000
+2024-11-11	9289 01:00:00.000000000	-9289 01:00:00.000000000	0 00:00:00.000000000
+4143-07-08	783111 00:00:00.000000000	-783111 00:00:00.000000000	0 00:00:00.000000000
+4966-12-04	1083855 01:00:00.000000000	-1083855 01:00:00.000000000	0 00:00:00.000000000
+5339-02-01	1219784 01:00:00.000000000	-1219784 01:00:00.000000000	0 00:00:00.000000000
+5344-10-04	1221856 00:00:00.000000000	-1221856 00:00:00.000000000	0 00:00:00.000000000
+5397-07-13	1241131 00:00:00.000000000	-1241131 00:00:00.000000000	0 00:00:00.000000000
+5966-07-09	1448949 00:00:00.000000000	-1448949 00:00:00.000000000	0 00:00:00.000000000
+6229-06-28	1544997 00:00:00.000000000	-1544997 00:00:00.000000000	0 00:00:00.000000000
+6482-04-27	1637342 00:00:00.000000000	-1637342 00:00:00.000000000	0 00:00:00.000000000
+6631-11-13	1691962 01:00:00.000000000	-1691962 01:00:00.000000000	0 00:00:00.000000000
+6705-09-28	1718944 00:00:00.000000000	-1718944 00:00:00.000000000	0 00:00:00.000000000
+6731-02-12	1728212 01:00:00.000000000	-1728212 01:00:00.000000000	0 00:00:00.000000000
+7160-12-02	1885195 01:00:00.000000000	-1885195 01:00:00.000000000	0 00:00:00.000000000
+7409-09-07	1976054 00:00:00.000000000	-1976054 00:00:00.000000000	0 00:00:00.000000000
+7503-06-23	2010310 00:00:00.000000000	-2010310 00:00:00.000000000	0 00:00:00.000000000
+8422-07-22	2345998 00:00:00.000000000	-2345998 00:00:00.000000000	0 00:00:00.000000000
+8521-01-16	2381970 01:00:00.000000000	-2381970 01:00:00.000000000	0 00:00:00.000000000
+9075-06-13	2584462 00:00:00.000000000	-2584462 00:00:00.000000000	0 00:00:00.000000000
+9209-11-11	2633556 01:00:00.000000000	-2633556 01:00:00.000000000	0 00:00:00.000000000
+9403-01-09	2704106 01:00:00.000000000	-2704106 01:00:00.000000000	0 00:00:00.000000000
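
Note on the block above: subtracting two dates yields an interval_day_time, so dateval - dateval is identically zero and the two mixed columns are exact negations of each other. The stray 23:00:00 and 01:00:00 hour components appear where the two dates fall on opposite sides of a daylight-saving (or, for very old dates, local-mean-time) offset change in the test timezone, which is the local-time behavior these golden files pin down. A sketch reusing a pair of dates from the output above (illustrative only):

    -- date - date yields INTERVAL DAY TO SECOND
    select
      date '1999-10-03' - date '1999-06-07',  -- 118 00:00:00.000000000
      date '1999-06-07' - date '1999-10-03';  -- -118 00:00:00.000000000
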
+PREHOOK: query: explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: tsval (type: timestamp), (tsval - 2-2) (type: timestamp), (tsval - -2-2) (type: timestamp), (tsval + 2-2) (type: timestamp), (tsval + -2-2) (type: timestamp), (-2-2 + tsval) (type: timestamp), (2-2 + tsval) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+tsval	c1	c2	c3	c4	c5	c6
+0004-09-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222
+0528-10-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273
+1319-02-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778
+1404-07-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026
+1815-05-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705
+1883-04-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229
+1966-08-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031
+1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156	1975-06-17 07:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156
+1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989	1976-12-04 16:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989
+1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162	1978-05-03 05:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162
+1976-05-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948
+1978-08-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501	1980-10-05 14:41:05.501	1976-06-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501
+1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689	1983-06-25 10:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689
+1981-11-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387
+1985-07-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11	1987-09-20 09:30:11	1983-05-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11
+1985-11-18 16:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 17:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54
+1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29	1989-04-21 20:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29
+1987-05-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987	2000-12-16 19:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987
+1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939	2001-12-03 15:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939
+2000-12-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252	2005-11-23 21:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252
+2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13	2006-05-07 21:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13
+2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876	2009-04-09 06:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876
+2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108	2011-03-21 11:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108
+2010-04-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165	2023-11-24 02:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165
+2024-11-11 16:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 17:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101
+4143-07-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259
+4966-12-04 09:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 10:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691	5341-04-01 15:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691
+5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165	5346-12-04 17:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165
+5397-07-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438
+5966-07-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597	5968-09-09 03:30:50.597	5964-05-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597
+6229-06-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179
+6482-04-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672	6707-11-28 17:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672
+6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702	6733-04-12 09:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702
+7160-12-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602	7411-11-07 22:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602
+7503-06-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486	7505-08-23 23:14:17.486	7501-04-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486
+8422-07-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084
+8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388	8523-03-16 21:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388
+9075-06-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797
+9209-11-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453
+9403-01-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547	9405-03-09 18:12:33.547	9400-11-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547
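
Note on the block above: the same 26-month shift applied to timestamps carries the time of day over unchanged, except where the shifted instant lands on the other side of a daylight-saving boundary and the wall-clock hour moves by one (compare the 1973-04-17 06:30 row landing on 07:30 under +2-2). A one-line sketch reusing a value from the output (illustrative only):

    select timestamp '1985-07-20 09:30:11'
           + interval '2-2' year to month;  -- 1987-09-20 09:30:11
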
+PREHOOK: query: explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: 5-5 (type: interval_year_month)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Limit
+                  Number of rows: 2
+                  Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+order by interval '2-2' year to month + interval '3-3' year to month
+limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+c0	c1
+5-5	-1-1
+5-5	-1-1
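
Note on the block above: expressions built purely from interval literals are folded to constants at compile time, which is why the plan sorts on the literal 5-5 rather than on a computed column. Year-month intervals are plain month counts, so the arithmetic is 26 + 39 = 65 months = 5-5 and 26 - 39 = -13 months = -1-1. Sketch (illustrative only):

    select
      interval '2-2' year to month + interval '3-3' year to month,  -- 5-5
      interval '2-2' year to month - interval '3-3' year to month;  -- -1-1
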
+PREHOOK: query: -- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: -- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: dateval (type: date), (dateval - 99 11:22:33.123456789) (type: timestamp), (dateval - -99 11:22:33.123456789) (type: timestamp), (dateval + 99 11:22:33.123456789) (type: timestamp), (dateval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + dateval) (type: timestamp), (99 11:22:33.123456789 + dateval) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: date)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	c1	c2	c3	c4	c5	c6
+0004-09-22	0004-06-14 12:37:26.876543211	0004-12-30 11:22:33.123456789	0004-12-30 11:22:33.123456789	0004-06-14 12:37:26.876543211	0004-06-14 12:37:26.876543211	0004-12-30 11:22:33.123456789
+0528-10-27	0528-07-19 12:37:26.876543211	0529-02-03 11:22:33.123456789	0529-02-03 11:22:33.123456789	0528-07-19 12:37:26.876543211	0528-07-19 12:37:26.876543211	0529-02-03 11:22:33.123456789
+1319-02-02	1318-10-25 12:37:26.876543211	1319-05-12 11:22:33.123456789	1319-05-12 11:22:33.123456789	1318-10-25 12:37:26.876543211	1318-10-25 12:37:26.876543211	1319-05-12 11:22:33.123456789
+1404-07-23	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789	1404-10-30 11:22:33.123456789	1404-04-14 12:37:26.876543211	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789
+1815-05-06	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789	1815-08-13 11:22:33.123456789	1815-01-26 12:37:26.876543211	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789
+1883-04-17	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789	1883-07-25 11:22:33.123456789	1883-01-07 12:37:26.876543211	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789
+1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789	1966-11-23 10:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789
+1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789	1973-07-25 12:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789
+1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789	1975-01-11 10:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789
+1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789	1976-06-10 12:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789
+1976-05-06	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 11:37:26.876543211	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789
+1978-08-05	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789	1978-11-12 10:22:33.123456789	1978-04-27 11:37:26.876543211	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789
+1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789	1981-08-02 12:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789
+1981-11-15	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 13:37:26.876543211	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789
+1985-07-20	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789	1985-10-27 10:22:33.123456789	1985-04-11 11:37:26.876543211	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789
+1985-11-18	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 13:37:26.876543211	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789
+1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789	1987-05-31 12:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789
+1987-05-28	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 11:37:26.876543211	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789
+1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789	1999-01-23 10:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789
+1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789	2000-01-10 10:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789
+2000-12-18	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 13:37:26.876543211	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789
+2002-05-10	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 11:37:26.876543211	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789
+2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789	2003-12-31 10:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789
+2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789	2004-06-14 12:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789
+2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789	2007-05-19 12:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789
+2009-01-21	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789	2009-04-30 12:22:33.123456789	2008-10-13 13:37:26.876543211	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789
+2010-04-08	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 11:37:26.876543211	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789
+2013-04-07	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 11:37:26.876543211	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789
+2013-04-10	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 11:37:26.876543211	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789
+2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789	2022-01-01 10:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789
+2024-11-11	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 13:37:26.876543211	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789
+4143-07-08	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789	4143-10-15 11:22:33.123456789	4143-03-30 12:37:26.876543211	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789
+4966-12-04	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789	4967-03-13 12:22:33.123456789	4966-08-26 13:37:26.876543211	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789
+5339-02-01	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789	5339-05-11 12:22:33.123456789	5338-10-24 13:37:26.876543211	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789
+5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789	5345-01-11 10:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789
+5397-07-13	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789	5397-10-20 11:22:33.123456789	5397-04-04 12:37:26.876543211	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789
+5966-07-09	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789	5966-10-16 11:22:33.123456789	5966-03-31 12:37:26.876543211	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789
+6229-06-28	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789	6229-10-05 11:22:33.123456789	6229-03-20 12:37:26.876543211	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789
+6482-04-27	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 11:37:26.876543211	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789
+6631-11-13	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 13:37:26.876543211	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789
+6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789	6706-01-05 10:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789
+6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789	6731-05-22 12:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789
+7160-12-02	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 13:37:26.876543211	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789
+7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789	7409-12-15 10:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789
+7503-06-23	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789	7503-09-30 11:22:33.123456789	7503-03-15 12:37:26.876543211	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789
+8422-07-22	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789	8422-10-29 11:22:33.123456789	8422-04-13 12:37:26.876543211	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789
+8521-01-16	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789	8521-04-25 12:22:33.123456789	8520-10-08 13:37:26.876543211	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789
+9075-06-13	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 11:37:26.876543211	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789
+9209-11-11	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 13:37:26.876543211	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789
+9403-01-09	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789	9403-04-18 12:22:33.123456789	9402-10-01 13:37:26.876543211	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789
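
Note on the block above: adding a day-time interval to a date promotes the result to timestamp, with the date contributing local midnight as the starting instant; the one-hour wobble in some rows again reflects timezone offset changes across the 99-day span. Sketch reusing the first row of the output (illustrative only):

    select date '0004-09-22'
           + interval '99 11:22:33.123456789' day to second;
    -- 0004-12-30 11:22:33.123456789
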
+PREHOOK: query: explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: dateval (type: date), tsval (type: timestamp), (dateval - tsval) (type: interval_day_time), (tsval - dateval) (type: interval_day_time), (tsval - tsval) (type: interval_day_time)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: date)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: timestamp), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: timestamp), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+order by dateval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+dateval	tsval	c2	c3	c4
+0004-09-22	0004-09-22 18:26:29.519542222	-0 18:26:30.519542222	0 18:26:30.519542222	0 00:00:00.000000000
+0528-10-27	0528-10-27 08:15:18.941718273	-0 08:15:19.941718273	0 08:15:19.941718273	0 00:00:00.000000000
+1319-02-02	1319-02-02 16:31:57.778	-0 16:31:58.778000000	0 16:31:58.778000000	0 00:00:00.000000000
+1404-07-23	1404-07-23 15:32:16.059185026	-0 15:32:17.059185026	0 15:32:17.059185026	0 00:00:00.000000000
+1815-05-06	1815-05-06 00:12:37.543584705	-0 00:12:38.543584705	0 00:12:38.543584705	0 00:00:00.000000000
+1883-04-17	1883-04-17 04:14:34.647766229	-0 04:14:35.647766229	0 04:14:35.647766229	0 00:00:00.000000000
+1966-08-16	1966-08-16 13:36:50.183618031	-0 13:36:51.183618031	0 13:36:51.183618031	0 00:00:00.000000000
+1973-04-17	1973-04-17 06:30:38.596784156	-0 06:30:38.596784156	0 06:30:38.596784156	0 00:00:00.000000000
+1974-10-04	1974-10-04 17:21:03.989	-0 17:21:03.989000000	0 17:21:03.989000000	0 00:00:00.000000000
+1976-03-03	1976-03-03 04:54:33.000895162	-0 04:54:33.000895162	0 04:54:33.000895162	0 00:00:00.000000000
+1976-05-06	1976-05-06 00:42:30.910786948	-0 00:42:30.910786948	0 00:42:30.910786948	0 00:00:00.000000000
+1978-08-05	1978-08-05 14:41:05.501	-0 14:41:05.501000000	0 14:41:05.501000000	0 00:00:00.000000000
+1981-04-25	1981-04-25 09:01:12.077192689	-0 09:01:12.077192689	0 09:01:12.077192689	0 00:00:00.000000000
+1981-11-15	1981-11-15 23:03:10.999338387	-0 23:03:10.999338387	0 23:03:10.999338387	0 00:00:00.000000000
+1985-07-20	1985-07-20 09:30:11	-0 09:30:11.000000000	0 09:30:11.000000000	0 00:00:00.000000000
+1985-11-18	1985-11-18 16:37:54	-0 16:37:54.000000000	0 16:37:54.000000000	0 00:00:00.000000000
+1987-02-21	1987-02-21 19:48:29	-0 19:48:29.000000000	0 19:48:29.000000000	0 00:00:00.000000000
+1987-05-28	1987-05-28 13:52:07.900916635	-0 13:52:07.900916635	0 13:52:07.900916635	0 00:00:00.000000000
+1998-10-16	1998-10-16 20:05:29.397591987	-0 20:05:29.397591987	0 20:05:29.397591987	0 00:00:00.000000000
+1999-10-03	1999-10-03 16:59:10.396903939	-0 16:59:10.396903939	0 16:59:10.396903939	0 00:00:00.000000000
+2000-12-18	2000-12-18 08:42:30.000595596	-0 08:42:30.000595596	0 08:42:30.000595596	0 00:00:00.000000000
+2002-05-10	2002-05-10 05:29:48.990818073	-0 05:29:48.990818073	0 05:29:48.990818073	0 00:00:00.000000000
+2003-09-23	2003-09-23 22:33:17.00003252	-0 22:33:17.000032520	0 22:33:17.000032520	0 00:00:00.000000000
+2004-03-07	2004-03-07 20:14:13	-0 20:14:13.000000000	0 20:14:13.000000000	0 00:00:00.000000000
+2007-02-09	2007-02-09 05:17:29.368756876	-0 05:17:29.368756876	0 05:17:29.368756876	0 00:00:00.000000000
+2009-01-21	2009-01-21 10:49:07.108	-0 10:49:07.108000000	0 10:49:07.108000000	0 00:00:00.000000000
+2010-04-08	2010-04-08 02:43:35.861742727	-0 02:43:35.861742727	0 02:43:35.861742727	0 00:00:00.000000000
+2013-04-07	2013-04-07 02:44:43.00086821	-0 02:44:43.000868210	0 02:44:43.000868210	0 00:00:00.000000000
+2013-04-10	2013-04-10 00:43:46.854731546	-0 00:43:46.854731546	0 00:43:46.854731546	0 00:00:00.000000000
+2021-09-24	2021-09-24 03:18:32.413655165	-0 03:18:32.413655165	0 03:18:32.413655165	0 00:00:00.000000000
+2024-11-11	2024-11-11 16:42:41.101	-0 16:42:41.101000000	0 16:42:41.101000000	0 00:00:00.000000000
+4143-07-08	4143-07-08 10:53:27.252802259	-0 10:53:27.252802259	0 10:53:27.252802259	0 00:00:00.000000000
+4966-12-04	4966-12-04 09:30:55.202	-0 09:30:55.202000000	0 09:30:55.202000000	0 00:00:00.000000000
+5339-02-01	5339-02-01 14:10:01.085678691	-0 14:10:01.085678691	0 14:10:01.085678691	0 00:00:00.000000000
+5344-10-04	5344-10-04 18:40:08.165	-0 18:40:08.165000000	0 18:40:08.165000000	0 00:00:00.000000000
+5397-07-13	5397-07-13 07:12:32.000896438	-0 07:12:32.000896438	0 07:12:32.000896438	0 00:00:00.000000000
+5966-07-09	5966-07-09 03:30:50.597	-0 03:30:50.597000000	0 03:30:50.597000000	0 00:00:00.000000000
+6229-06-28	6229-06-28 02:54:28.970117179	-0 02:54:28.970117179	0 02:54:28.970117179	0 00:00:00.000000000
+6482-04-27	6482-04-27 12:07:38.073915413	-0 12:07:38.073915413	0 12:07:38.073915413	0 00:00:00.000000000
+6631-11-13	6631-11-13 16:31:29.702202248	-0 16:31:29.702202248	0 16:31:29.702202248	0 00:00:00.000000000
+6705-09-28	6705-09-28 18:27:28.000845672	-0 18:27:28.000845672	0 18:27:28.000845672	0 00:00:00.000000000
+6731-02-12	6731-02-12 08:12:48.287783702	-0 08:12:48.287783702	0 08:12:48.287783702	0 00:00:00.000000000
+7160-12-02	7160-12-02 06:00:24.81200852	-0 06:00:24.812008520	0 06:00:24.812008520	0 00:00:00.000000000
+7409-09-07	7409-09-07 23:33:32.459349602	-0 23:33:32.459349602	0 23:33:32.459349602	0 00:00:00.000000000
+7503-06-23	7503-06-23 23:14:17.486	-0 23:14:17.486000000	0 23:14:17.486000000	0 00:00:00.000000000
+8422-07-22	8422-07-22 03:21:45.745036084	-0 03:21:45.745036084	0 03:21:45.745036084	0 00:00:00.000000000
+8521-01-16	8521-01-16 20:42:05.668832388	-0 20:42:05.668832388	0 20:42:05.668832388	0 00:00:00.000000000
+9075-06-13	9075-06-13 16:20:09.218517797	-0 16:20:09.218517797	0 16:20:09.218517797	0 00:00:00.000000000
+9209-11-11	9209-11-11 04:08:58.223768453	-0 04:08:58.223768453	0 04:08:58.223768453	0 00:00:00.000000000
+9403-01-09	9403-01-09 18:12:33.547	-0 18:12:33.547000000	0 18:12:33.547000000	0 00:00:00.000000000
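
Note on the block above: mixed date/timestamp subtraction also produces an interval_day_time. Because each dateval here is the date part of its tsval, dateval - tsval is essentially the negated time-of-day padded to nine fractional digits (up to small historical-timezone adjustments visible in the oldest rows), tsval - dateval is its mirror image, and tsval - tsval is identically zero. Sketch reusing a modern row (illustrative only):

    select
      date '2013-04-10' - timestamp '2013-04-10 00:43:46.854731546',
      -- -0 00:43:46.854731546
      timestamp '2013-04-10 00:43:46.854731546'
        - timestamp '2013-04-10 00:43:46.854731546';  -- 0 00:00:00.000000000
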
+PREHOOK: query: explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: tsval (type: timestamp), (tsval - 99 11:22:33.123456789) (type: timestamp), (tsval - -99 11:22:33.123456789) (type: timestamp), (tsval + 99 11:22:33.123456789) (type: timestamp), (tsval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + tsval) (type: timestamp), (99 11:22:33.123456789 + tsval) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                    Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp)
+                      sort order: +
+                      Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+order by tsval
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+tsval	c1	c2	c3	c4	c5	c6
+0004-09-22 18:26:29.519542222	0004-06-15 07:03:56.396085433	0004-12-31 05:49:02.642999011	0004-12-31 05:49:02.642999011	0004-06-15 07:03:56.396085433	0004-06-15 07:03:56.396085433	0004-12-31 05:49:02.642999011
+0528-10-27 08:15:18.941718273	0528-07-19 20:52:45.818261484	0529-02-03 19:37:52.065175062	0529-02-03 19:37:52.065175062	0528-07-19 20:52:45.818261484	0528-07-19 20:52:45.818261484	0529-02-03 19:37:52.065175062
+1319-02-02 16:31:57.778	1318-10-26 05:09:24.654543211	1319-05-13 03:54:30.901456789	1319-05-13 03:54:30.901456789	1318-10-26 05:09:24.654543211	1318-10-26 05:09:24.654543211	1319-05-13 03:54:30.901456789
+1404-07-23 15:32:16.059185026	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815	1404-10-31 02:54:49.182641815	1404-04-15 04:09:42.935728237	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815
+1815-05-06 00:12:37.543584705	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494	1815-08-13 11:35:10.667041494	1815-01-26 12:50:04.420127916	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494
+1883-04-17 04:14:34.647766229	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018	1883-07-25 15:37:07.771223018	1883-01-07 16:52:01.52430944	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018
+1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482	1966-11-23 23:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482
+1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945	1973-07-25 18:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945
+1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789	1975-01-12 03:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789
+1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951	1976-06-10 17:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951
+1976-05-06 00:42:30.910786948	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 12:19:57.787330159	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789	1978-11-13 01:03:38.624456789	1978-04-28 02:18:32.377543211	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789
+1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478	1981-08-02 21:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478
+1981-11-15 23:03:10.999338387	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 12:40:37.875881598	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789	1985-10-27 19:52:44.123456789	1985-04-11 21:07:37.876543211	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789
+1985-11-18 16:37:54	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 06:15:20.876543211	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789	1987-06-01 08:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789
+1987-05-28 13:52:07.900916635	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 01:29:34.777459846	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776	1999-01-24 06:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776
+1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728	2000-01-11 03:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728
+2000-12-18 08:42:30.000595596	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 22:19:56.877138807	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 17:07:15.867361284	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309	2004-01-01 08:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309
+2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789	2004-06-15 08:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789
+2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665	2007-05-19 17:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665
+2009-01-21 10:49:07.108	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789	2009-04-30 23:11:40.231456789	2008-10-14 00:26:33.984543211	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789
+2010-04-08 02:43:35.861742727	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 14:21:02.738285938	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 14:22:09.877411421	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 12:21:13.731274757	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954	2022-01-01 13:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954
+2024-11-11 16:42:41.101	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 06:20:07.977543211	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789
+4143-07-08 10:53:27.252802259	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048	4143-10-15 22:16:00.376259048	4143-03-30 23:30:54.12934547	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048
+4966-12-04 09:30:55.202	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789	4967-03-13 21:53:28.325456789	4966-08-26 23:08:22.078543211	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789
+5339-02-01 14:10:01.085678691	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548	5339-05-12 02:32:34.20913548	5338-10-25 03:47:27.962221902	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548
+5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789	5345-01-12 05:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789
+5397-07-13 07:12:32.000896438	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227	5397-10-20 18:35:05.124353227	5397-04-04 19:49:58.877439649	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227
+5966-07-09 03:30:50.597	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789	5966-10-16 14:53:23.720456789	5966-03-31 16:08:17.473543211	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789
+6229-06-28 02:54:28.970117179	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968	6229-10-05 14:17:02.093573968	6229-03-20 15:31:55.84666039	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968
+6482-04-27 12:07:38.073915413	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-17 23:45:04.950458624	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 06:08:56.578745459	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461	6706-01-06 04:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461
+6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491	6731-05-22 20:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491
+7160-12-02 06:00:24.81200852	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 19:37:51.688551731	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391	7409-12-16 09:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391
+7503-06-23 23:14:17.486	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789	7503-10-01 10:36:50.609456789	7503-03-16 11:51:44.362543211	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789
+8422-07-22 03:21:45.745036084	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873	8422-10-29 14:44:18.868492873	8422-04-13 15:59:12.621579295	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873
+8521-01-16 20:42:05.668832388	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177	8521-04-26 09:04:38.792289177	8520-10-09 10:19:32.545375599	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177
+9075-06-13 16:20:09.218517797	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 03:57:36.095061008	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 17:46:25.100311664	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789	9403-04-19 06:35:06.670456789	9402-10-02 07:50:00.423543211	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789
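One property worth noticing in the result rows above: columns c1, c4 and c5 are identical, as are c2, c3 and c6, because subtracting an interval is the same as adding its negation, and interval addition commutes with the timestamp operand. A small java.time sketch of the same symmetry (the q.out strings above are additionally rendered in the test's local time zone, which this sketch does not reproduce):

    import java.time.Duration;
    import java.time.Instant;

    public class IntervalSymmetrySketch {
      public static void main(String[] args) {
        // interval '99 11:22:33.123456789' day to second
        Duration i = Duration.ofDays(99).plusHours(11).plusMinutes(22)
            .plusSeconds(33).plusNanos(123456789);
        Instant ts = Instant.parse("1985-07-20T09:30:11Z");
        System.out.println(ts.minus(i));          // tsval - I
        System.out.println(ts.plus(i.negated())); // tsval + (-I): same instant
      }
    }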
+PREHOOK: query: explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+POSTHOOK: type: QUERY
+Explain
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: interval_arithmetic_1
+                  Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 50 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                    Limit
+                      Number of rows: 2
+                      Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@interval_arithmetic_1
+#### A masked pattern was here ####
+c0	c1
+109 20:30:40.246913578	89 02:14:26.000000000
+109 20:30:40.246913578	89 02:14:26.000000000
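The two constants above can be reproduced with plain nanosecond arithmetic; at this magnitude a single long suffices, although Hive itself keeps seconds and nanoseconds in separate fields (see the GenVectorCode.java comment in the hunk below). A sketch:

    public class IntervalDayTimeSketch {
      static long nanos(long d, long h, long m, long s, long n) {
        return (((d * 24 + h) * 60 + m) * 60 + s) * 1_000_000_000L + n;
      }

      static String format(long total) {
        long n = total % 1_000_000_000L; total /= 1_000_000_000L;
        long s = total % 60; total /= 60;
        long m = total % 60; total /= 60;
        long h = total % 24; total /= 24;
        return String.format("%d %02d:%02d:%02d.%09d", total, h, m, s, n);
      }

      public static void main(String[] args) {
        long a = nanos(99, 11, 22, 33, 123456789); // '99 11:22:33.123456789'
        long b = nanos(10, 9, 8, 7, 123456789);    // '10 9:8:7.123456789'
        System.out.println(format(a + b)); // 109 20:30:40.246913578
        System.out.println(format(a - b)); // 89 02:14:26.000000000
      }
    }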
+PREHOOK: query: drop table interval_arithmetic_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@interval_arithmetic_1
+PREHOOK: Output: default@interval_arithmetic_1
+POSTHOOK: query: drop table interval_arithmetic_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@interval_arithmetic_1
+POSTHOOK: Output: default@interval_arithmetic_1


[19/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
----------------------------------------------------------------------
diff --git a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
index fede273..08eee0b 100644
--- a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
+++ b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
@@ -35,7 +35,40 @@ public class GenVectorCode extends Task {
 
   private static String [][] templateExpansions =
     {
-      // The following datetime/interval arithmetic operations can be done using the vectorized values
+
+      /**
+       * date is stored in a LongColumnVector as epochDays
+       * interval_year_month is stored in a LongColumnVector as epochMonths
+       *
+       * interval_day_time and timestamp are stored in a TimestampColumnVector (2 longs to hold
+       *     very large number of nanoseconds)
+       *
+       * date - date --> type: interval_day_time
+       * timestamp - date --> type: interval_day_time
+       * date - timestamp --> type: interval_day_time
+       * timestamp - timestamp --> type: interval_day_time
+       *
+       * date +|- interval_day_time --> type: timestamp
+       * interval_day_time + date --> type: timestamp
+       *
+       * timestamp +|- interval_day_time --> type: timestamp
+       * interval_day_time +|- timestamp --> type: timestamp
+       *
+       * date +|- interval_year_month --> type: date
+       * interval_year_month + date --> type: date
+       *
+       * timestamp +|- interval_year_month --> type: timestamp
+       * interval_year_month + timestamp --> type: timestamp
+       *
+       * Adding/Subtracting months done with Calendar object
+       *
+       * Timestamp compared with long: the long is interpreted as seconds
+       * Timestamp compared with double: the double is interpreted as seconds with fractional nanoseconds
+       *
+       */
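The representation described in this comment is the heart of the fix: a single signed 64-bit nanosecond count only spans roughly the years 1678 to 2262, while the tests in this commit use timestamps from year 0004 to 9403, hence the two-field storage. A hedged sketch of arithmetic on such a pair with carry handling; the field and method names here are assumptions, not the real TimestampColumnVector API:

    // Illustrative (seconds, nanos) arithmetic with carry/borrow.
    public class SecondsNanosSketch {
      static final long NANOS_PER_SECOND = 1_000_000_000L;

      long seconds; // whole seconds since epoch (wide range)
      int nanos;    // 0..999,999,999

      SecondsNanosSketch(long seconds, int nanos) {
        this.seconds = seconds;
        this.nanos = nanos;
      }

      // Add an interval expressed the same way, normalizing the nanos field.
      // Both nanos values are within (-1e9, 1e9), so one adjustment suffices.
      SecondsNanosSketch plus(long intervalSeconds, int intervalNanos) {
        long s = seconds + intervalSeconds;
        long n = (long) nanos + intervalNanos;
        if (n >= NANOS_PER_SECOND) { s++; n -= NANOS_PER_SECOND; }
        else if (n < 0) { s--; n += NANOS_PER_SECOND; }
        return new SecondsNanosSketch(s, (int) n);
      }

      public static void main(String[] args) {
        SecondsNanosSketch t = new SecondsNanosSketch(10L, 900_000_000);
        SecondsNanosSketch u = t.plus(0L, 200_000_000); // carries into seconds
        System.out.println(u.seconds + "." + u.nanos);  // 11.100000000
      }
    }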
+
+      // The following datetime/interval arithmetic operations can be done using the vectorized values.
+      // Type interval_year_month (LongColumnVector storing months).
       {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
       {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
       {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
@@ -44,80 +77,89 @@ public class GenVectorCode extends Task {
       {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
       {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
 
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
-
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
-
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "timestamp", "+"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
-
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "timestamp", "interval_day_time", "+"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
-
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
-
-      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "timestamp", "-"},
-      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
-      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
-
-      // The following datetime/interval arithmetic functions require type conversion for one or both operands
-      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
-      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
-      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
-
-      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
-      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
-      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
-
-      {"ColumnArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
-      {"ScalarArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
-      {"ColumnArithmeticScalarWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
-
-      {"ColumnArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
-      {"ScalarArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
-      {"ColumnArithmeticScalarWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
-
-      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
-      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
-      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
-
-      {"ColumnArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
-      {"ScalarArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
-      {"ColumnArithmeticScalarWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
-
-      // Most year-month interval arithmetic needs its own generation
-      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
-      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
-      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
-
-      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
-      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
-      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
-
-      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
-      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
-      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
-
-      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
-      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
-      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
-
-      {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
-      {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
-      {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
-
-      {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
-      {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
-      {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
-
+      // Arithmetic on two values of type interval_day_time (TimestampColumnVector storing a nanosecond
+      // interval in 2 longs) produces an interval_day_time.
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Col", "interval_day_time", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Scalar", "interval_day_time", "Column"},
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Col", "interval_day_time", "Column"},
+
+      {"TimestampArithmeticTimestamp", "Subtract", "interval_day_time", "Col", "interval_day_time", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Subtract", "interval_day_time", "Scalar", "interval_day_time", "Column"},
+      {"TimestampArithmeticTimestamp", "Subtract", "interval_day_time", "Col", "interval_day_time", "Column"},
+
+      // A type timestamp (TimestampColumnVector) plus/minus a type interval_day_time (TimestampColumnVector
+      // storing nanosecond interval in 2 longs) produces a timestamp.
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Col", "timestamp", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Scalar", "timestamp", "Column"},
+      {"TimestampArithmeticTimestamp", "Add", "interval_day_time", "Col", "timestamp", "Column"},
+
+      {"TimestampArithmeticTimestamp", "Add", "timestamp", "Col", "interval_day_time", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Add", "timestamp", "Scalar", "interval_day_time", "Column"},
+      {"TimestampArithmeticTimestamp", "Add", "timestamp", "Col", "interval_day_time", "Column"},
+
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Col", "interval_day_time", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Scalar", "interval_day_time", "Column"},
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Col", "interval_day_time", "Column"},
+
+      // A type timestamp (TimestampColumnVector) minus a type timestamp produces a
+      // type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 primitives).
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Col", "timestamp", "Scalar"},
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Scalar", "timestamp", "Column"},
+      {"TimestampArithmeticTimestamp", "Subtract", "timestamp", "Col", "timestamp", "Column"},
+
+      // Arithmetic with a type date (LongColumnVector storing epoch days) and type interval_day_time (IntervalDayTimeColumnVector storing
+      // nanosecond interval in 2 primitives) produces a type timestamp (TimestampColumnVector).
+      {"DateArithmeticTimestamp", "Add", "date", "Col", "interval_day_time", "Column"},
+      {"DateArithmeticTimestamp", "Add", "date", "Scalar", "interval_day_time", "Column"},
+      {"DateArithmeticTimestamp", "Add", "date", "Col", "interval_day_time", "Scalar"},
+
+      {"DateArithmeticTimestamp", "Subtract", "date", "Col", "interval_day_time", "Column"},
+      {"DateArithmeticTimestamp", "Subtract", "date", "Scalar", "interval_day_time", "Column"},
+      {"DateArithmeticTimestamp", "Subtract", "date", "Col", "interval_day_time", "Scalar"},
+
+      {"TimestampArithmeticDate", "Add", "interval_day_time", "Col", "date", "Column"},
+      {"TimestampArithmeticDate", "Add", "interval_day_time", "Scalar", "date", "Column"},
+      {"TimestampArithmeticDate", "Add", "interval_day_time", "Col", "date", "Scalar"},
+
+      // Subtraction with a type date (LongColumnVector storing days) and type timestamp produces a
+      // type interval_day_time (IntervalDayTimeColumnVector).
+      {"DateArithmeticTimestamp", "Subtract", "date", "Col", "timestamp", "Column"},
+      {"DateArithmeticTimestamp", "Subtract", "date", "Scalar", "timestamp", "Column"},
+      {"DateArithmeticTimestamp", "Subtract", "date", "Col", "timestamp", "Scalar"},
+
+      {"TimestampArithmeticDate", "Subtract", "timestamp", "Col", "date", "Column"},
+      {"TimestampArithmeticDate", "Subtract", "timestamp", "Scalar", "date", "Column"},
+      {"TimestampArithmeticDate", "Subtract", "timestamp", "Col", "date", "Scalar"},
+
+      // Arithmetic with a type date (LongColumnVector storing epoch days) and type interval_year_month (LongColumnVector storing
+      // months) produces a type date via a calendar calculation.
+      {"DateArithmeticIntervalYearMonth", "Add", "+", "date", "Col", "interval_year_month", "Column"},
+      {"DateArithmeticIntervalYearMonth", "Add", "+", "date", "Scalar", "interval_year_month", "Column"},
+      {"DateArithmeticIntervalYearMonth", "Add", "+", "date", "Col", "interval_year_month", "Scalar"},
+
+      {"DateArithmeticIntervalYearMonth", "Subtract", "-", "date", "Col", "interval_year_month", "Column"},
+      {"DateArithmeticIntervalYearMonth", "Subtract", "-", "date", "Scalar", "interval_year_month", "Column"},
+      {"DateArithmeticIntervalYearMonth", "Subtract", "-", "date", "Col", "interval_year_month", "Scalar"},
+
+      {"IntervalYearMonthArithmeticDate", "Add", "+", "interval_year_month", "Col", "date", "Column"},
+      {"IntervalYearMonthArithmeticDate", "Add", "+", "interval_year_month", "Scalar", "date", "Column"},
+      {"IntervalYearMonthArithmeticDate", "Add", "+", "interval_year_month", "Col", "date", "Scalar"},
+
+      // Arithmetic with a type timestamp (TimestampColumnVector) and type interval_year_month (LongColumnVector storing
+      // months) produces a type timestamp via a calendar calculation.
+      {"TimestampArithmeticIntervalYearMonth", "Add", "+", "timestamp", "Col", "interval_year_month", "Column"},
+      {"TimestampArithmeticIntervalYearMonth", "Add", "+", "timestamp", "Scalar", "interval_year_month", "Column"},
+      {"TimestampArithmeticIntervalYearMonth", "Add", "+", "timestamp", "Col", "interval_year_month", "Scalar"},
+
+      {"TimestampArithmeticIntervalYearMonth", "Subtract", "-", "timestamp", "Col", "interval_year_month", "Column"},
+      {"TimestampArithmeticIntervalYearMonth", "Subtract", "-", "timestamp", "Scalar", "interval_year_month", "Column"},
+      {"TimestampArithmeticIntervalYearMonth", "Subtract", "-", "timestamp", "Col", "interval_year_month", "Scalar"},
+
+      {"IntervalYearMonthArithmeticTimestamp", "Add","+", "interval_year_month", "Col", "timestamp", "Column"},
+      {"IntervalYearMonthArithmeticTimestamp", "Add","+", "interval_year_month", "Scalar", "timestamp", "Column"},
+      {"IntervalYearMonthArithmeticTimestamp", "Add","+", "interval_year_month", "Col", "timestamp", "Scalar"},
+
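The "calendar calculation" wording in the comments above matters: a month is not a fixed number of days, so date or timestamp plus/minus interval_year_month cannot be a plain offset on epoch days or nanoseconds (the removed template rows delegated this to dtm.addMonths* helpers backed by a Calendar object). A small java.time sketch of the effect, illustrative rather than Hive's own helper:

    import java.time.LocalDate;

    public class MonthArithmeticSketch {
      public static void main(String[] args) {
        LocalDate d = LocalDate.of(2004, 1, 31);
        LocalDate plus1 = d.plusMonths(1);      // 2004-02-29: clamped, leap year
        System.out.println(plus1);
        System.out.println(plus1.toEpochDay() - d.toEpochDay()); // 29, not a fixed offset
      }
    }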
+      // Long/double arithmetic
       {"ColumnArithmeticScalar", "Add", "long", "long", "+"},
       {"ColumnArithmeticScalar", "Subtract", "long", "long", "-"},
       {"ColumnArithmeticScalar", "Multiply", "long", "long", "*"},
@@ -263,46 +305,129 @@ public class GenVectorCode extends Task {
       {"ScalarCompareColumn", "GreaterEqual", "long", "long", ">="},
       {"ScalarCompareColumn", "GreaterEqual", "double", "long", ">="},
 
-      {"TimestampColumnCompareTimestampScalar", "Equal"},
-      {"TimestampColumnCompareTimestampScalar", "NotEqual"},
-      {"TimestampColumnCompareTimestampScalar", "Less"},
-      {"TimestampColumnCompareTimestampScalar", "LessEqual"},
-      {"TimestampColumnCompareTimestampScalar", "Greater"},
-      {"TimestampColumnCompareTimestampScalar", "GreaterEqual"},
-
-      {"TimestampColumnCompareScalar", "Equal", "long"},
-      {"TimestampColumnCompareScalar", "Equal", "double"},
-      {"TimestampColumnCompareScalar", "NotEqual", "long"},
-      {"TimestampColumnCompareScalar", "NotEqual", "double"},
-      {"TimestampColumnCompareScalar", "Less", "long"},
-      {"TimestampColumnCompareScalar", "Less", "double"},
-      {"TimestampColumnCompareScalar", "LessEqual", "long"},
-      {"TimestampColumnCompareScalar", "LessEqual", "double"},
-      {"TimestampColumnCompareScalar", "Greater", "long"},
-      {"TimestampColumnCompareScalar", "Greater", "double"},
-      {"TimestampColumnCompareScalar", "GreaterEqual", "long"},
-      {"TimestampColumnCompareScalar", "GreaterEqual", "double"},
-
-      {"TimestampScalarCompareTimestampColumn", "Equal"},
-      {"TimestampScalarCompareTimestampColumn", "NotEqual"},
-      {"TimestampScalarCompareTimestampColumn", "Less"},
-      {"TimestampScalarCompareTimestampColumn", "LessEqual"},
-      {"TimestampScalarCompareTimestampColumn", "Greater"},
-      {"TimestampScalarCompareTimestampColumn", "GreaterEqual"},
-
-      {"ScalarCompareTimestampColumn", "Equal", "long"},
-      {"ScalarCompareTimestampColumn", "Equal", "double"},
-      {"ScalarCompareTimestampColumn", "NotEqual", "long"},
-      {"ScalarCompareTimestampColumn", "NotEqual", "double"},
-      {"ScalarCompareTimestampColumn", "Less", "long"},
-      {"ScalarCompareTimestampColumn", "Less", "double"},
-      {"ScalarCompareTimestampColumn", "LessEqual", "long"},
-      {"ScalarCompareTimestampColumn", "LessEqual", "double"},
-      {"ScalarCompareTimestampColumn", "Greater", "long"},
-      {"ScalarCompareTimestampColumn", "Greater", "double"},
-      {"ScalarCompareTimestampColumn", "GreaterEqual", "long"},
-      {"ScalarCompareTimestampColumn", "GreaterEqual", "double"},
-
+      // Compare timestamp to timestamp.
+      {"TimestampCompareTimestamp", "Equal", "==", "timestamp", "Col", "Column"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Col", "Column"},
+      {"TimestampCompareTimestamp", "Less", "<", "timestamp", "Col", "Column"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Col", "Column"},
+      {"TimestampCompareTimestamp", "Greater", ">", "timestamp", "Col", "Column"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Col", "Column"},
+
+      {"TimestampCompareTimestamp", "Equal", "==", "timestamp", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "Less", "<", "timestamp", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "Greater", ">", "timestamp", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Col", "Scalar"},
+
+      {"TimestampCompareTimestamp", "Equal", "==", "timestamp", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "Less", "<", "timestamp", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "Greater", ">", "timestamp", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Scalar", "Column"},
+
+      {"TimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Col", "Column"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Col", "Column"},
+      {"TimestampCompareTimestamp", "Less", "<", "interval_day_time", "Col", "Column"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Col", "Column"},
+      {"TimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Col", "Column"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Col", "Column"},
+
+      {"TimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "Less", "<", "interval_day_time", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Col", "Scalar"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Col", "Scalar"},
+
+      {"TimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "Less", "<", "interval_day_time", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Scalar", "Column"},
+      {"TimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Scalar", "Column"},
+
+      // Compare timestamp to integer seconds or double seconds with fractional nanoseconds.
+      {"TimestampCompareLongDouble", "Equal", "long", "==", "Col", "Column"},
+      {"TimestampCompareLongDouble", "Equal", "double", "==", "Col", "Column"},
+      {"TimestampCompareLongDouble", "NotEqual", "long", "!=", "Col", "Column"},
+      {"TimestampCompareLongDouble", "NotEqual", "double", "!=", "Col", "Column"},
+      {"TimestampCompareLongDouble", "Less", "long", "<", "Col", "Column"},
+      {"TimestampCompareLongDouble", "Less", "double", "<", "Col", "Column"},
+      {"TimestampCompareLongDouble", "LessEqual", "long", "<=", "Col", "Column"},
+      {"TimestampCompareLongDouble", "LessEqual", "double", "<=", "Col", "Column"},
+      {"TimestampCompareLongDouble", "Greater", "long", ">", "Col", "Column"},
+      {"TimestampCompareLongDouble", "Greater", "double", ">", "Col", "Column"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Col", "Column"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Col", "Column"},
+
+      {"LongDoubleCompareTimestamp", "Equal", "long", "==", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "Equal", "double", "==", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "Less", "long", "<", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "Less", "double", "<", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "Greater", "long", ">", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "Greater", "double", ">", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Col", "Column"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Col", "Column"},
+
+      {"TimestampCompareLongDouble", "Equal", "long", "==", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "Equal", "double", "==", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "NotEqual", "long", "!=", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "NotEqual", "double", "!=", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "Less", "long", "<", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "Less", "double", "<", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "LessEqual", "long", "<=", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "LessEqual", "double", "<=", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "Greater", "long", ">", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "Greater", "double", ">", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Col", "Scalar"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Col", "Scalar"},
+
+      {"LongDoubleCompareTimestamp", "Equal", "long", "==", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "Equal", "double", "==", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "Less", "long", "<", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "Less", "double", "<", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "Greater", "long", ">", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "Greater", "double", ">", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Col", "Scalar"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Col", "Scalar"},
+
+      {"TimestampCompareLongDouble", "Equal", "long", "==", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "Equal", "double", "==", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "NotEqual", "long", "!=", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "NotEqual", "double", "!=", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "Less", "long", "<", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "Less", "double", "<", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "LessEqual", "long", "<=", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "LessEqual", "double", "<=", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "Greater", "long", ">", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "Greater", "double", ">", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Scalar", "Column"},
+      {"TimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Scalar", "Column"},
+
+      {"LongDoubleCompareTimestamp", "Equal", "long", "==", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "Equal", "double", "==", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "Less", "long", "<", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "Less", "double", "<", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "Greater", "long", ">", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "Greater", "double", ">", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Scalar", "Column"},
+      {"LongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Scalar", "Column"},
+
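These template rows implement the semantics noted in the comment above: when a timestamp is compared with a long, the long is taken as whole seconds; a double is taken as seconds with fractional nanoseconds. A sketch of the intended comparison, with illustrative names rather than the generated expression classes:

    public class TimestampCompareSketch {
      // Timestamp as (epochSeconds, nanos) with nanos in 0..999,999,999.
      static boolean lessThanDouble(long seconds, int nanos, double other) {
        // Collapse to double seconds; accepts double's limited precision.
        return seconds + nanos / 1e9 < other;
      }

      static boolean lessThanLong(long seconds, int nanos, long otherSeconds) {
        // Equal whole seconds with nonzero nanos means the timestamp is later,
        // so strict less-than only needs the seconds field.
        return seconds < otherSeconds;
      }

      public static void main(String[] args) {
        System.out.println(lessThanDouble(10, 500_000_000, 10.6)); // true
        System.out.println(lessThanLong(10, 500_000_000, 10));     // false
      }
    }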
+      // Filter long/double.
       {"FilterColumnCompareScalar", "Equal", "long", "double", "=="},
       {"FilterColumnCompareScalar", "Equal", "double", "double", "=="},
       {"FilterColumnCompareScalar", "NotEqual", "long", "double", "!="},
@@ -355,46 +480,132 @@ public class GenVectorCode extends Task {
       {"FilterScalarCompareColumn", "GreaterEqual", "long", "long", ">="},
       {"FilterScalarCompareColumn", "GreaterEqual", "double", "long", ">="},
 
-      {"FilterTimestampColumnCompareTimestampScalar", "Equal"},
-      {"FilterTimestampColumnCompareTimestampScalar", "NotEqual"},
-      {"FilterTimestampColumnCompareTimestampScalar", "Less"},
-      {"FilterTimestampColumnCompareTimestampScalar", "LessEqual"},
-      {"FilterTimestampColumnCompareTimestampScalar", "Greater"},
-      {"FilterTimestampColumnCompareTimestampScalar", "GreaterEqual"},
-
-      {"FilterTimestampColumnCompareScalar", "Equal", "long"},
-      {"FilterTimestampColumnCompareScalar", "Equal", "double"},
-      {"FilterTimestampColumnCompareScalar", "NotEqual", "long"},
-      {"FilterTimestampColumnCompareScalar", "NotEqual", "double"},
-      {"FilterTimestampColumnCompareScalar", "Less", "long"},
-      {"FilterTimestampColumnCompareScalar", "Less", "double"},
-      {"FilterTimestampColumnCompareScalar", "LessEqual", "long"},
-      {"FilterTimestampColumnCompareScalar", "LessEqual", "double"},
-      {"FilterTimestampColumnCompareScalar", "Greater", "long"},
-      {"FilterTimestampColumnCompareScalar", "Greater", "double"},
-      {"FilterTimestampColumnCompareScalar", "GreaterEqual", "long"},
-      {"FilterTimestampColumnCompareScalar", "GreaterEqual", "double"},
-
-      {"FilterTimestampScalarCompareTimestampColumn", "Equal"},
-      {"FilterTimestampScalarCompareTimestampColumn", "NotEqual"},
-      {"FilterTimestampScalarCompareTimestampColumn", "Less"},
-      {"FilterTimestampScalarCompareTimestampColumn", "LessEqual"},
-      {"FilterTimestampScalarCompareTimestampColumn", "Greater"},
-      {"FilterTimestampScalarCompareTimestampColumn", "GreaterEqual"},
-
-      {"FilterScalarCompareTimestampColumn", "Equal", "long"},
-      {"FilterScalarCompareTimestampColumn", "Equal", "double"},
-      {"FilterScalarCompareTimestampColumn", "NotEqual", "long"},
-      {"FilterScalarCompareTimestampColumn", "NotEqual", "double"},
-      {"FilterScalarCompareTimestampColumn", "Less", "long"},
-      {"FilterScalarCompareTimestampColumn", "Less", "double"},
-      {"FilterScalarCompareTimestampColumn", "LessEqual", "long"},
-      {"FilterScalarCompareTimestampColumn", "LessEqual", "double"},
-      {"FilterScalarCompareTimestampColumn", "Greater", "long"},
-      {"FilterScalarCompareTimestampColumn", "Greater", "double"},
-      {"FilterScalarCompareTimestampColumn", "GreaterEqual", "long"},
-      {"FilterScalarCompareTimestampColumn", "GreaterEqual", "double"},
-
+      // Filter timestamp against timestamp, or interval day time against interval day time.
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "timestamp", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "timestamp", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "timestamp", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Col", "Column"},
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "timestamp", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "timestamp", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "timestamp", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Col", "Scalar"},
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "timestamp", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "timestamp", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "timestamp", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "timestamp", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "timestamp", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "timestamp", "Scalar", "Column"},
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "interval_day_time", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Col", "Column"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Col", "Column"},
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "interval_day_time", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Col", "Scalar"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Col", "Scalar"},
+
+      {"FilterTimestampCompareTimestamp", "Equal", "==", "interval_day_time", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "NotEqual", "!=", "interval_day_time", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "Less", "<", "interval_day_time", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "LessEqual", "<=", "interval_day_time", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "Greater", ">", "interval_day_time", "Scalar", "Column"},
+      {"FilterTimestampCompareTimestamp", "GreaterEqual", ">=", "interval_day_time", "Scalar", "Column"},
+
+      // Filter timestamp against long (seconds) or double (seconds with fractional
+      // nanoseconds).
+
+      {"FilterTimestampCompareLongDouble", "Equal", "long", "==", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "Equal", "double", "==", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "long", "!=", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "double", "!=", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "Less", "long", "<", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "Less", "double", "<", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "long", "<=", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "double", "<=", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "Greater", "long", ">", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "Greater", "double", ">", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Col", "Column"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Col", "Column"},
+
+      {"FilterLongDoubleCompareTimestamp", "Equal", "long", "==", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Equal", "double", "==", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "long", "<", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "double", "<", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "long", ">", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "double", ">", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Col", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Col", "Column"},
+
+      {"FilterTimestampCompareLongDouble", "Equal", "long", "==", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "Equal", "double", "==", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "long", "!=", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "double", "!=", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "Less", "long", "<", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "Less", "double", "<", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "long", "<=", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "double", "<=", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "Greater", "long", ">", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "Greater", "double", ">", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Col", "Scalar"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Col", "Scalar"},
+
+      {"FilterLongDoubleCompareTimestamp", "Equal", "long", "==", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "Equal", "double", "==", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "long", "<", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "double", "<", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "long", ">", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "double", ">", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Col", "Scalar"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Col", "Scalar"},
+
+      {"FilterTimestampCompareLongDouble", "Equal", "long", "==", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "Equal", "double", "==", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "long", "!=", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "NotEqual", "double", "!=", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "Less", "long", "<", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "Less", "double", "<", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "long", "<=", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "LessEqual", "double", "<=", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "Greater", "long", ">", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "Greater", "double", ">", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "long", ">=", "Scalar", "Column"},
+      {"FilterTimestampCompareLongDouble", "GreaterEqual", "double", ">=", "Scalar", "Column"},
+
+      {"FilterLongDoubleCompareTimestamp", "Equal", "long", "==", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Equal", "double", "==", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "long", "!=", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "NotEqual", "double", "!=", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "long", "<", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Less", "double", "<", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "long", "<=", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "LessEqual", "double", "<=", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "long", ">", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "Greater", "double", ">", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "long", ">=", "Scalar", "Column"},
+      {"FilterLongDoubleCompareTimestamp", "GreaterEqual", "double", ">=", "Scalar", "Column"},
+
+      // String group comparison.
       {"FilterStringGroupColumnCompareStringGroupScalarBase", "Equal", "=="},
       {"FilterStringGroupColumnCompareStringGroupScalarBase", "NotEqual", "!="},
       {"FilterStringGroupColumnCompareStringGroupScalarBase", "Less", "<"},
@@ -488,26 +699,28 @@ public class GenVectorCode extends Task {
       {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "Greater", ">"},
       {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "GreaterEqual", ">="},
 
-      {"FilterDecimalColumnCompareScalar", "Equal", "=="},
-      {"FilterDecimalColumnCompareScalar", "NotEqual", "!="},
-      {"FilterDecimalColumnCompareScalar", "Less", "<"},
-      {"FilterDecimalColumnCompareScalar", "LessEqual", "<="},
-      {"FilterDecimalColumnCompareScalar", "Greater", ">"},
-      {"FilterDecimalColumnCompareScalar", "GreaterEqual", ">="},
-
-      {"FilterDecimalScalarCompareColumn", "Equal", "=="},
-      {"FilterDecimalScalarCompareColumn", "NotEqual", "!="},
-      {"FilterDecimalScalarCompareColumn", "Less", "<"},
-      {"FilterDecimalScalarCompareColumn", "LessEqual", "<="},
-      {"FilterDecimalScalarCompareColumn", "Greater", ">"},
-      {"FilterDecimalScalarCompareColumn", "GreaterEqual", ">="},
-
-      {"FilterDecimalColumnCompareColumn", "Equal", "=="},
-      {"FilterDecimalColumnCompareColumn", "NotEqual", "!="},
-      {"FilterDecimalColumnCompareColumn", "Less", "<"},
-      {"FilterDecimalColumnCompareColumn", "LessEqual", "<="},
-      {"FilterDecimalColumnCompareColumn", "Greater", ">"},
-      {"FilterDecimalColumnCompareColumn", "GreaterEqual", ">="},
+      {"FilterDecimalColumnCompareDecimalScalar", "Equal", "=="},
+      {"FilterDecimalColumnCompareDecimalScalar", "NotEqual", "!="},
+      {"FilterDecimalColumnCompareDecimalScalar", "Less", "<"},
+      {"FilterDecimalColumnCompareDecimalScalar", "LessEqual", "<="},
+      {"FilterDecimalColumnCompareDecimalScalar", "Greater", ">"},
+      {"FilterDecimalColumnCompareDecimalScalar", "GreaterEqual", ">="},
+
+      {"FilterDecimalScalarCompareDecimalColumn", "Equal", "=="},
+      {"FilterDecimalScalarCompareDecimalColumn", "NotEqual", "!="},
+      {"FilterDecimalScalarCompareDecimalColumn", "Less", "<"},
+      {"FilterDecimalScalarCompareDecimalColumn", "LessEqual", "<="},
+      {"FilterDecimalScalarCompareDecimalColumn", "Greater", ">"},
+      {"FilterDecimalScalarCompareDecimalColumn", "GreaterEqual", ">="},
+
+      {"FilterDecimalColumnCompareDecimalColumn", "Equal", "=="},
+      {"FilterDecimalColumnCompareDecimalColumn", "NotEqual", "!="},
+      {"FilterDecimalColumnCompareDecimalColumn", "Less", "<"},
+      {"FilterDecimalColumnCompareDecimalColumn", "LessEqual", "<="},
+      {"FilterDecimalColumnCompareDecimalColumn", "Greater", ">"},
+      {"FilterDecimalColumnCompareDecimalColumn", "GreaterEqual", ">="},
 
       {"StringGroupScalarCompareStringGroupColumnBase", "Equal", "=="},
       {"StringGroupScalarCompareStringGroupColumnBase", "NotEqual", "!="},
@@ -585,6 +798,9 @@ public class GenVectorCode extends Task {
       {"FilterDecimalColumnBetween", ""},
       {"FilterDecimalColumnBetween", "!"},
 
+      {"FilterTimestampColumnBetween", ""},
+      {"FilterTimestampColumnBetween", "!"},
+
       {"ColumnCompareColumn", "Equal", "long", "double", "=="},
       {"ColumnCompareColumn", "Equal", "double", "double", "=="},
       {"ColumnCompareColumn", "NotEqual", "long", "double", "!="},
@@ -611,58 +827,34 @@ public class GenVectorCode extends Task {
       {"ColumnCompareColumn", "GreaterEqual", "long", "long", ">="},
       {"ColumnCompareColumn", "GreaterEqual", "double", "long", ">="},
 
-      // Interval comparisons
+      // Interval year month comparisons
       {"DTIScalarCompareColumn", "Equal", "interval_year_month"},
-      {"DTIScalarCompareColumn", "Equal", "interval_day_time"},
       {"DTIScalarCompareColumn", "NotEqual", "interval_year_month"},
-      {"DTIScalarCompareColumn", "NotEqual", "interval_day_time"},
       {"DTIScalarCompareColumn", "Less", "interval_year_month"},
-      {"DTIScalarCompareColumn", "Less", "interval_day_time"},
       {"DTIScalarCompareColumn", "LessEqual", "interval_year_month"},
-      {"DTIScalarCompareColumn", "LessEqual", "interval_day_time"},
       {"DTIScalarCompareColumn", "Greater", "interval_year_month"},
-      {"DTIScalarCompareColumn", "Greater", "interval_day_time"},
       {"DTIScalarCompareColumn", "GreaterEqual", "interval_year_month"},
-      {"DTIScalarCompareColumn", "GreaterEqual", "interval_day_time"},
 
       {"DTIColumnCompareScalar", "Equal", "interval_year_month"},
-      {"DTIColumnCompareScalar", "Equal", "interval_day_time"},
       {"DTIColumnCompareScalar", "NotEqual", "interval_year_month"},
-      {"DTIColumnCompareScalar", "NotEqual", "interval_day_time"},
       {"DTIColumnCompareScalar", "Less", "interval_year_month"},
-      {"DTIColumnCompareScalar", "Less", "interval_day_time"},
       {"DTIColumnCompareScalar", "LessEqual", "interval_year_month"},
-      {"DTIColumnCompareScalar", "LessEqual", "interval_day_time"},
       {"DTIColumnCompareScalar", "Greater", "interval_year_month"},
-      {"DTIColumnCompareScalar", "Greater", "interval_day_time"},
       {"DTIColumnCompareScalar", "GreaterEqual", "interval_year_month"},
-      {"DTIColumnCompareScalar", "GreaterEqual", "interval_day_time"},
 
       {"FilterDTIScalarCompareColumn", "Equal", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "Equal", "interval_day_time"},
       {"FilterDTIScalarCompareColumn", "NotEqual", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "NotEqual", "interval_day_time"},
       {"FilterDTIScalarCompareColumn", "Less", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "Less", "interval_day_time"},
       {"FilterDTIScalarCompareColumn", "LessEqual", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "LessEqual", "interval_day_time"},
       {"FilterDTIScalarCompareColumn", "Greater", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "Greater", "interval_day_time"},
       {"FilterDTIScalarCompareColumn", "GreaterEqual", "interval_year_month"},
-      {"FilterDTIScalarCompareColumn", "GreaterEqual", "interval_day_time"},
 
       {"FilterDTIColumnCompareScalar", "Equal", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "Equal", "interval_day_time"},
       {"FilterDTIColumnCompareScalar", "NotEqual", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "NotEqual", "interval_day_time"},
       {"FilterDTIColumnCompareScalar", "Less", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "Less", "interval_day_time"},
       {"FilterDTIColumnCompareScalar", "LessEqual", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "LessEqual", "interval_day_time"},
       {"FilterDTIColumnCompareScalar", "Greater", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "Greater", "interval_day_time"},
       {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_year_month"},
-      {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_day_time"},
 
       // Date comparisons
       {"DTIScalarCompareColumn", "Equal", "date"},
@@ -759,24 +951,12 @@ public class GenVectorCode extends Task {
       // Casts
       {"ColumnUnaryFunc", "Cast", "long", "double", "", "", "(long)", "", ""},
       {"ColumnUnaryFunc", "Cast", "double", "long", "", "", "(double)", "", ""},
-      {"ColumnUnaryFunc", "CastTimestampToLongVia", "long", "long", "MathExpr.fromTimestamp", "",
-        "", "", "timestamp"},
-      {"ColumnUnaryFunc", "CastTimestampToDoubleVia", "double", "long",
-          "MathExpr.fromTimestampToDouble", "", "", "", "timestamp"},
       {"ColumnUnaryFunc", "CastDoubleToBooleanVia", "long", "double", "MathExpr.toBool", "",
         "", "", ""},
       {"ColumnUnaryFunc", "CastLongToBooleanVia", "long", "long", "MathExpr.toBool", "",
         "", "", ""},
       {"ColumnUnaryFunc", "CastDateToBooleanVia", "long", "long", "MathExpr.toBool", "",
             "", "", "date"},
-      {"ColumnUnaryFunc", "CastTimestampToBooleanVia", "long", "long", "MathExpr.toBool", "",
-            "", "", "timestamp"},
-      {"ColumnUnaryFunc", "CastLongToTimestampVia", "long", "long", "MathExpr.longToTimestamp", "",
-          "", "", ""},
-      {"ColumnUnaryFunc", "CastMillisecondsLongToTimestampVia", "long", "long", "MathExpr.millisecondsLongToTimestamp", "",
-            "", "", ""},
-      {"ColumnUnaryFunc", "CastDoubleToTimestampVia", "long", "double",
-         "MathExpr.doubleToTimestamp", "", "", "", ""},
 
       // Boolean to long is done with an IdentityExpression
       // Boolean to double is done with standard Long to Double cast
@@ -823,6 +1003,16 @@ public class GenVectorCode extends Task {
       {"VectorUDAFMinMaxString", "VectorUDAFMaxString", ">", "max",
           "_FUNC_(expr) - Returns the minimum value of expr (vectorized, type: string)"},
 
+      {"VectorUDAFMinMaxTimestamp", "VectorUDAFMaxTimestamp", "<", "max",
+          "_FUNC_(expr) - Returns the maximum value of expr (vectorized, type: timestamp)"},
+      {"VectorUDAFMinMaxTimestamp", "VectorUDAFMinTimestamp", ">", "min",
+          "_FUNC_(expr) - Returns the minimum value of expr (vectorized, type: timestamp)"},
+
+      {"VectorUDAFMinMaxIntervalDayTime", "VectorUDAFMaxIntervalDayTime", "<", "max",
+          "_FUNC_(expr) - Returns the maximum value of expr (vectorized, type: interval_day_time)"},
+      {"VectorUDAFMinMaxIntervalDayTime", "VectorUDAFMinIntervalDayTime", ">", "min",
+          "_FUNC_(expr) - Returns the minimum value of expr (vectorized, type: interval_day_time)"},
+
         //template, <ClassName>, <ValueType>
         {"VectorUDAFSum", "VectorUDAFSumLong", "long"},
         {"VectorUDAFSum", "VectorUDAFSumDouble", "double"},
@@ -967,26 +1157,30 @@ public class GenVectorCode extends Task {
         generateColumnCompareScalar(tdesc);
       } else if (tdesc[0].equals("ScalarCompareColumn")) {
         generateScalarCompareColumn(tdesc);
-      } else if (tdesc[0].equals("TimestampScalarCompareTimestampColumn")) {
-          generateTimestampScalarCompareTimestampColumn(tdesc);
-      } else if (tdesc[0].equals("ScalarCompareTimestampColumn")) {
-          generateScalarCompareTimestampColumn(tdesc);
-      } else if (tdesc[0].equals("TimestampColumnCompareTimestampScalar")) {
-          generateTimestampColumnCompareTimestampScalar(tdesc);
-      } else if (tdesc[0].equals("TimestampColumnCompareScalar")) {
-          generateTimestampColumnCompareScalar(tdesc);
+
+      } else if (tdesc[0].equals("TimestampCompareTimestamp")) {
+        generateTimestampCompareTimestamp(tdesc);
+
+      } else if (tdesc[0].equals("TimestampCompareLongDouble")) {
+        generateTimestampCompareLongDouble(tdesc);
+
+      } else if (tdesc[0].equals("LongDoubleCompareTimestamp")) {
+        generateLongDoubleCompareTimestamp(tdesc);
+
       } else if (tdesc[0].equals("FilterColumnCompareScalar")) {
         generateFilterColumnCompareScalar(tdesc);
       } else if (tdesc[0].equals("FilterScalarCompareColumn")) {
         generateFilterScalarCompareColumn(tdesc);
-      } else if (tdesc[0].equals("FilterTimestampColumnCompareTimestampScalar")) {
-          generateFilterTimestampColumnCompareTimestampScalar(tdesc);
-      } else if (tdesc[0].equals("FilterTimestampColumnCompareScalar")) {
-          generateFilterTimestampColumnCompareScalar(tdesc);
-      } else if (tdesc[0].equals("FilterTimestampScalarCompareTimestampColumn")) {
-          generateFilterTimestampScalarCompareTimestampColumn(tdesc);
-      } else if (tdesc[0].equals("FilterScalarCompareTimestampColumn")) {
-          generateFilterScalarCompareTimestampColumn(tdesc);
+
+      } else if (tdesc[0].equals("FilterTimestampCompareTimestamp")) {
+        generateFilterTimestampCompareTimestamp(tdesc);
+
+      } else if (tdesc[0].equals("FilterTimestampCompareLongDouble")) {
+        generateFilterTimestampCompareLongDouble(tdesc);
+
+      } else if (tdesc[0].equals("FilterLongDoubleCompareTimestamp")) {
+        generateFilterLongDoubleCompareTimestamp(tdesc);
+
       } else if (tdesc[0].equals("FilterColumnBetween")) {
         generateFilterColumnBetween(tdesc);
       } else if (tdesc[0].equals("ScalarArithmeticColumn") || tdesc[0].equals("ScalarDivideColumn")) {
@@ -1008,7 +1202,11 @@ public class GenVectorCode extends Task {
       } else if (tdesc[0].equals("VectorUDAFMinMaxString")) {
         generateVectorUDAFMinMaxString(tdesc);
       } else if (tdesc[0].equals("VectorUDAFMinMaxDecimal")) {
-        generateVectorUDAFMinMaxDecimal(tdesc);
+        generateVectorUDAFMinMaxObject(tdesc);
+      } else if (tdesc[0].equals("VectorUDAFMinMaxTimestamp")) {
+        generateVectorUDAFMinMaxObject(tdesc);
+      } else if (tdesc[0].equals("VectorUDAFMinMaxIntervalDayTime")) {
+        generateVectorUDAFMinMaxObject(tdesc);
       } else if (tdesc[0].equals("VectorUDAFSum")) {
         generateVectorUDAFSum(tdesc);
       } else if (tdesc[0].equals("VectorUDAFAvg")) {
@@ -1029,7 +1227,9 @@ public class GenVectorCode extends Task {
         generateFilterTruncStringColumnBetween(tdesc);
       } else if (tdesc[0].equals("FilterDecimalColumnBetween")) {
         generateFilterDecimalColumnBetween(tdesc);
-      } else if (tdesc[0].equals("StringGroupColumnCompareStringGroupScalarBase")) {
+      } else if (tdesc[0].equals("FilterTimestampColumnBetween")) {
+        generateFilterTimestampColumnBetween(tdesc);
+      } else if (tdesc[0].equals("StringGroupColumnCompareStringGroupScalarBase")) {
         generateStringGroupColumnCompareStringGroupScalarBase(tdesc);
       } else if (tdesc[0].equals("StringGroupColumnCompareStringScalar")) {
         generateStringGroupColumnCompareStringScalar(tdesc);
@@ -1059,12 +1259,12 @@ public class GenVectorCode extends Task {
         generateIfExprScalarColumn(tdesc);
       } else if (tdesc[0].equals("IfExprScalarScalar")) {
         generateIfExprScalarScalar(tdesc);
-      } else if (tdesc[0].equals("FilterDecimalColumnCompareScalar")) {
-        generateFilterDecimalColumnCompareScalar(tdesc);
-      } else if (tdesc[0].equals("FilterDecimalScalarCompareColumn")) {
-        generateFilterDecimalScalarCompareColumn(tdesc);
-      } else if (tdesc[0].equals("FilterDecimalColumnCompareColumn")) {
-        generateFilterDecimalColumnCompareColumn(tdesc);
+      } else if (tdesc[0].equals("FilterDecimalColumnCompareDecimalScalar")) {
+        generateFilterDecimalColumnCompareDecimalScalar(tdesc);
+      } else if (tdesc[0].equals("FilterDecimalScalarCompareDecimalColumn")) {
+        generateFilterDecimalScalarCompareDecimalColumn(tdesc);
+      } else if (tdesc[0].equals("FilterDecimalColumnCompareDecimalColumn")) {
+        generateFilterDecimalColumnCompareDecimalColumn(tdesc);
       } else if (tdesc[0].equals("FilterDTIScalarCompareColumn")) {
         generateFilterDTIScalarCompareColumn(tdesc);
       } else if (tdesc[0].equals("FilterDTIColumnCompareScalar")) {
@@ -1079,24 +1279,28 @@ public class GenVectorCode extends Task {
         generateScalarArithmeticColumn(tdesc);
       } else if (tdesc[0].equals("DTIColumnArithmeticDTIColumnNoConvert")) {
         generateColumnArithmeticColumn(tdesc);
-      } else if (tdesc[0].equals("ColumnArithmeticColumnWithConvert")) {
-        generateColumnArithmeticColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("ScalarArithmeticColumnWithConvert")) {
-        generateScalarArithmeticColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("ColumnArithmeticScalarWithConvert")) {
-        generateColumnArithmeticScalarWithConvert(tdesc);
-      } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalColumnWithConvert")) {
-        generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("DateTimeScalarArithmeticIntervalColumnWithConvert")) {
-        generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalScalarWithConvert")) {
-        generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc);
-      } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeColumnWithConvert")) {
-        generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("IntervalScalarArithmeticDateTimeColumnWithConvert")) {
-        generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc);
-      } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeScalarWithConvert")) {
-        generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc);
+
+      } else if (tdesc[0].equals("DateArithmeticIntervalYearMonth")) {
+        generateDateTimeArithmeticIntervalYearMonth(tdesc);
+
+      } else if (tdesc[0].equals("IntervalYearMonthArithmeticDate")) {
+        generateDateTimeArithmeticIntervalYearMonth(tdesc);
+
+      } else if (tdesc[0].equals("TimestampArithmeticIntervalYearMonth")) {
+        generateDateTimeArithmeticIntervalYearMonth(tdesc);
+
+      } else if (tdesc[0].equals("IntervalYearMonthArithmeticTimestamp")) {
+        generateDateTimeArithmeticIntervalYearMonth(tdesc);
+
+      } else if (tdesc[0].equals("TimestampArithmeticTimestamp")) {
+        generateTimestampArithmeticTimestamp(tdesc);
+
+      } else if (tdesc[0].equals("DateArithmeticTimestamp")) {
+        generateDateArithmeticTimestamp(tdesc);
+
+      } else if (tdesc[0].equals("TimestampArithmeticDate")) {
+        generateTimestampArithmeticDate(tdesc);
+
       } else {
         continue;
       }
@@ -1162,6 +1366,20 @@ public class GenVectorCode extends Task {
         className, templateString);
   }
 
+  private void generateFilterTimestampColumnBetween(String[] tdesc) throws IOException {
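+    // For example, the descriptor row {"FilterTimestampColumnBetween", "!"} above expands this
+    // template into the class FilterTimestampColumnNotBetween.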
+    String optionalNot = tdesc[1];
+    String className = "FilterTimestampColumn" + (optionalNot.equals("!") ? "Not" : "")
+        + "Between";
+    // Read the template into a string, expand it, and write it.
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<OptionalNot>", optionalNot);
+
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
   private void generateFilterColumnBetween(String[] tdesc) throws Exception {
     String operandType = tdesc[1];
     String optionalNot = tdesc[2];
@@ -1233,7 +1451,7 @@ public class GenVectorCode extends Task {
         className, templateString);
   }
 
-  private void generateVectorUDAFMinMaxDecimal(String[] tdesc) throws Exception {
+  private void generateVectorUDAFMinMaxObject(String[] tdesc) throws Exception {
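+      // Renamed from generateVectorUDAFMinMaxDecimal: the same object-based template expansion
+      // now also serves the Timestamp and IntervalDayTime min/max aggregates (see the dispatch above).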
       String className = tdesc[1];
       String operatorSymbol = tdesc[2];
       String descName = tdesc[3];
@@ -1631,7 +1849,7 @@ public class GenVectorCode extends Task {
     String vectorExprArgType = operandType;
     if (operandType.equals("long")) {
       // the interval_year_month type can use the long version
-      vectorExprArgType = "int_interval_family";
+      vectorExprArgType = "int_interval_year_month";
     }
     // Expand, and write result
     templateString = templateString.replaceAll("<ClassName>", className);
@@ -1663,7 +1881,7 @@ public class GenVectorCode extends Task {
     // Toss in date and interval_year_month.
     if (operandType.equals("long")) {
       // Let comparisons occur for DATE and INTERVAL_YEAR_MONTH, too.
-      vectorExprArgType = "int_datetime_interval_family";
+      vectorExprArgType = "int_date_interval_year_month";
     }
     templateString = templateString.replaceAll("<VectorExprArgType>", vectorExprArgType);
 
@@ -1695,8 +1913,8 @@ public class GenVectorCode extends Task {
 
     // Toss in date and interval_year_month.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_interval_family";
-      vectorExprArgType3 = "int_datetime_interval_family";
+      vectorExprArgType2 = "int_date_interval_year_month";
+      vectorExprArgType3 = "int_date_interval_year_month";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1728,8 +1946,8 @@ public class GenVectorCode extends Task {
 
     // Toss in date and interval_year_month.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_interval_family";
-      vectorExprArgType3 = "int_datetime_interval_family";
+      vectorExprArgType2 = "int_date_interval_year_month";
+      vectorExprArgType3 = "int_date_interval_year_month";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1760,8 +1978,8 @@ public class GenVectorCode extends Task {
 
     // Toss in date and interval_year_month.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_interval_family";
-      vectorExprArgType3 = "int_datetime_interval_family";
+      vectorExprArgType2 = "int_date_interval_year_month";
+      vectorExprArgType3 = "int_date_interval_year_month";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1892,12 +2110,10 @@ public class GenVectorCode extends Task {
     String vectorExprArgType1 = operandType1;
     String vectorExprArgType2 = operandType2;
 
-    // For column to column only, we toss in timestamp and date.
-    // But {timestamp|date} and scalar must be handled separately.
+    // For column to column only, we toss in date and interval_year_month.
     if (operandType1.equals("long") && operandType2.equals("long")) {
-      // Let comparisons occur for DATE and TIMESTAMP, too.
-      vectorExprArgType1 = "int_datetime_interval_family";
-      vectorExprArgType2 = "int_datetime_interval_family";
+      vectorExprArgType1 = "int_date_interval_year_month";
+      vectorExprArgType2 = "int_date_interval_year_month";
     }
     templateString = templateString.replaceAll("<VectorExprArgType1>", vectorExprArgType1);
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
@@ -1919,132 +2135,246 @@ public class GenVectorCode extends Task {
     }
   }
 
-  private void generateTimestampScalarCompareTimestampColumn(String[] tdesc) throws Exception {
+  // -----------------------------------------------------------------------------------------------
+  //
+  // Filter timestamp against timestamp, long (seconds), and double (seconds with fractional
+  // nanoseconds).
+  //
+  //  Filter  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //  Filter  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Column
+  //* Filter  {Long|Double}Col     {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //
+  //  Filter  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampScalar
+  //  Filter  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Scalar
+  //* Filter  {Long|Double}Col     {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampScalar
+  //
+  //  Filter  TimestampScalar      {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //  Filter  TimestampScalar      {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Column
+  //* Filter  {Long|Double}Scalar  {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //
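+  // For example, the descriptor row {"FilterLongDoubleCompareTimestamp", "Greater", "long", ">", "Col", "Scalar"}
+  // in the table above expands to the class FilterLongColGreaterTimestampScalar.
+  //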
+  // -----------------------------------------------------------------------------------------------
+
+  private void generateFilterTimestampCompareTimestamp(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
-    String className = "TimestampScalar" + operatorName + "TimestampColumn";
-    String baseClassName = "LongScalar" + operatorName + "LongColumn";
-    //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
-    String templateString = readFile(templateFile);
-    templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
-    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
-        className, templateString);
-  }
+    String operatorSymbol = tdesc[2];
+    String operandType = tdesc[3];
+    String camelOperandType = getCamelCaseType(operandType);
 
-  private void generateTimestampColumnCompareTimestampScalar(String[] tdesc) throws Exception {
-    String operatorName = tdesc[1];
-    String className = "TimestampCol" + operatorName + "TimestampScalar";
-    String baseClassName = "LongCol" + operatorName + "LongScalar";
+    String className = "Filter" + camelOperandType + tdesc[4] + operatorName + camelOperandType + tdesc[5];
+    String baseClassName = "FilterTimestamp" + tdesc[4] + operatorName + "Timestamp" + tdesc[5] + "Base";
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "FilterTimestamp" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareTimestamp" +
+        tdesc[5];
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    templateString = templateString.replaceAll("<CamelOperandType>", camelOperandType);
+    templateString = templateString.replaceAll("<HiveOperandType>", getTimestampHiveType(operandType));
+
+    String inputColumnVectorType = this.getColumnVectorType(operandType);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
-  private void generateFilterTimestampColumnCompareTimestampScalar(String[] tdesc) throws Exception {
+  private void generateFilterTimestampCompareLongDouble(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
-    String className = "FilterTimestampCol" + operatorName + "TimestampScalar";
-    String baseClassName = "FilterLongCol" + operatorName + "LongScalar";
+    String operandType = tdesc[2];
+    String camelCaseOperandType = getCamelCaseType(operandType);
+    String operatorSymbol = tdesc[3];
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType);
+
+    String className = "FilterTimestamp" + tdesc[4] + operatorName + camelCaseOperandType + tdesc[5];
+
+    // When the Timestamp operand is a scalar, the corresponding long/double comparison class is used as the base class.
+    String baseClassName;
+    if (tdesc[4].equals("Scalar")) {
+      baseClassName = "org.apache.hadoop.hive.ql.exec.vector.expressions.gen." +
+          "Filter" + camelCaseOperandType + "Scalar" + operatorName + camelCaseOperandType + "Column";
+    } else {
+      baseClassName = "";
+    }
+
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "FilterTimestamp" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareLongDouble" +
+        tdesc[5];
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    if (baseClassName.length() > 0) {
+      templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    }
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<GetTimestampLongDoubleMethod>", timestampLongDoubleMethod(operandType));
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
-  private void generateFilterTimestampScalarCompareTimestampColumn(String[] tdesc) throws Exception {
+  private void generateFilterLongDoubleCompareTimestamp(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
-    String className = "FilterTimestampScalar" + operatorName + "TimestampColumn";
-    String baseClassName = "FilterLongScalar" + operatorName + "LongColumn";
+    String operandType = tdesc[2];
+    String camelCaseOperandType = getCamelCaseType(operandType);
+    String operatorSymbol = tdesc[3];
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType);
+
+    String className = "Filter" + getCamelCaseType(operandType) + tdesc[4] + operatorName + "Timestamp" + tdesc[5];
+
+    // When the Timestamp operand is a scalar, the corresponding long/double comparison class is used as the base class.
+    String baseClassName;
+    if (tdesc[5].equals("Scalar")) {
+      baseClassName = "org.apache.hadoop.hive.ql.exec.vector.expressions.gen." +
+          "Filter" + camelCaseOperandType + "Col" + operatorName + camelCaseOperandType + "Scalar";
+    } else {
+      baseClassName = "";
+    }
+
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "FilterLongDouble" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareTimestamp" +
+        tdesc[5];
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    if (baseClassName.length() > 0) {
+      templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    }
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<GetTimestampLongDoubleMethod>", timestampLongDoubleMethod(operandType));
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
-  private String timestampScalarConversion(String operandType) {
+  private String timestampLongDoubleMethod(String operandType) {
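+    // Returns the accessor name substituted for <GetTimestampLongDoubleMethod> in the templates;
+    // presumably the TimestampColumnVector methods that read an entry as whole seconds (long) or
+    // as seconds with a fractional part (double).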
     if (operandType.equals("long")) {
-      return "secondsToNanoseconds";
+      return "getTimestampAsLong";
     } else if (operandType.equals("double")) {
-      return "doubleToNanoseconds";
+      return "getDouble";
     } else {
       return "unknown";
     }
   }
 
-  private void generateScalarCompareTimestampColumn(String[] tdesc) throws Exception {
+  // -----------------------------------------------------------------------------------------------
+  //
+  // Compare timestamp against timestamp, long (seconds), and double (seconds with fractional
+  // nanoseconds).
+  //
+  //  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Column
+  //* {Long|Double}Col     {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //
+  //  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampScalar
+  //  TimestampCol         {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Scalar
+  //* {Long|Double}Col     {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampScalar
+  //
+  //  TimestampScalar      {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //  TimestampScalar      {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   {Long|Double}Column
+  //* {Long|Double}Scalar  {Equal|Greater|GreaterEqual|Less|LessEqual|NotEqual}   TimestampColumn
+  //
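+  // For example, a row like {"TimestampCompareLongDouble", "Less", "long", "<", "Col", "Scalar"}
+  // would expand to the class TimestampColLessLongScalar.
+  //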
+  // -----------------------------------------------------------------------------------------------
+
+  private void generateTimestampCompareTimestamp(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
-    String operandType = tdesc[2];
-    String className = getCamelCaseType(operandType) + "Scalar" + operatorName + "TimestampColumn";
-    String baseClassName = "LongScalar" + operatorName + "LongColumn";
-    //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
-    String templateString = readFile(templateFile);
-    templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
-    templateString = templateString.replaceAll("<OperandType>", operandType);
-    templateString = templateString.replaceAll("<TimestampScalarConversion>", timestampScalarConversion(operandType));
-    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
-        className, templateString);
-  }
+    String operatorSymbol = tdesc[2];
+    String operandType = tdesc[3];
+    String camelOperandType = getCamelCaseType(operandType);
+    String className = camelOperandType + tdesc[4] + operatorName + camelOperandType + tdesc[5];
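+    // e.g. a row like {"TimestampCompareTimestamp", "Equal", "==", "timestamp", "Col", "Column"}
+    // would yield the class name TimestampColEqualTimestampColumn.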
 
-  private void generateTimestampColumnCompareScalar(String[] tdesc) throws Exception {
-    String operatorName = tdesc[1];
-    String operandType = tdesc[2];
-    String className = "TimestampCol" + operatorName + getCamelCaseType(operandType) + "Scalar";
-    String baseClassName = "LongCol" + operatorName + "LongScalar";
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "Timestamp" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareTimestamp" +
+        (tdesc[5].equals("Col") ? "Column" : tdesc[5]);
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType>", operandType);
-    templateString = templateString.replaceAll("<TimestampScalarConversion>", timestampScalarConversion(operandType));
+    templateString = templateString.replaceAll("<CamelOperandType>", camelOperandType);
+    templateString = templateString.replaceAll("<HiveOperandType>", getTimestampHiveType(operandType));
+    templateString = templateString.replaceAll("<InputColumnVectorType>", getColumnVectorType(operandType));
+
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
-  private void generateFilterTimestampColumnCompareScalar(String[] tdesc) throws Exception {
+  private void generateTimestampCompareLongDouble(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
     String operandType = tdesc[2];
-    String className = "FilterTimestampCol" + operatorName + getCamelCaseType(operandType) + "Scalar";
-    String baseClassName = "FilterLongCol" + operatorName + "LongScalar";
+    String camelCaseOperandType = getCamelCaseType(operandType);
+    String operatorSymbol = tdesc[3];
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType);
+
+    String className = "Timestamp" + tdesc[4] + operatorName + getCamelCaseType(operandType) + tdesc[5];
+
+    // When the Timestamp operand is a scalar, the corresponding long/double comparison class is used as the base class.
+    String baseClassName;
+    if (tdesc[4].equals("Scalar")) {
+      baseClassName = camelCaseOperandType + "Scalar" + operatorName + camelCaseOperandType + "Column";
+    } else {
+      baseClassName = "";
+    }
+
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "Timestamp" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareLongDouble" +
+        tdesc[5];
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    if (baseClassName.length() > 0) {
+      templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    }
     templateString = templateString.replaceAll("<OperandType>", operandType);
-    templateString = templateString.replaceAll("<TimestampScalarConversion>", timestampScalarConversion(operandType));
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<GetTimestampLongDoubleMethod>", timestampLongDoubleMethod(operandType));
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
-  private void generateFilterScalarCompareTimestampColumn(String[] tdesc) throws Exception {
+  private void generateLongDoubleCompareTimestamp(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
     String operandType = tdesc[2];
-    String className = "Filter" + getCamelCaseType(operandType) + "Scalar" + operatorName + "TimestampColumn";
-    String baseClassName = "FilterLongScalar" + operatorName + "LongColumn";
+    String camelCaseOperandType = getCamelCaseType(operandType);
+    String operatorSymbol = tdesc[3];
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType);
+
+    String className = getCamelCaseType(operandType) + tdesc[4] + operatorName + "Timestamp" + tdesc[5];
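+    // e.g. a row like {"LongDoubleCompareTimestamp", "Less", "long", "<", "Col", "Scalar"}
+    // would yield the class name LongColLessTimestampScalar.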
+
+    // When the Timestamp operand is a scalar, the corresponding long/double comparison class is used as the base class.
+    String baseClassName;
+    if (tdesc[5].equals("Scalar")) {
+      baseClassName = camelCaseOperandType + "Col" + operatorName + camelCaseOperandType + "Scalar";
+    } else {
+      baseClassName = "";
+    }
+
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = "LongDouble" + (tdesc[4].equals("Col") ? "Column" : tdesc[4]) + "CompareTimestamp" +
+        tdesc[5];
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    if (baseClassName.length() > 0) {
+      templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    }
     templateString = templateString.replaceAll("<OperandType>", operandType);
-    templateString = templateString.replaceAll("<TimestampScalarConversion>", timestampScalarConversion(operandType));
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<GetTimestampLongDoubleMethod>", timestampLongDoubleMethod(operandType));
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
 
+  // -----------------------------------------------------------------------------------------------
+  //
+  // -----------------------------------------------------------------------------------------------
 
   private void generateColumnArithmeticOperatorColumn(String[] tdesc, String returnType,
          String className) throws Exception {
@@ -2147,7 +2477,7 @@ public class GenVectorCode extends Task {
         className, templateString);
 
     String testScalarType = operandType2;
-    if (isDateTimeIntervalType(testScalarType)) {
+    if (isDateIntervalType(testScalarType)) {
       testScalarType = "long";
     }
 
@@ -2229,7 +2559,7 @@ public class GenVectorCode extends Task {
         className, templateString);
 
      String testScalarType = operandType1;
-     if (isDateTimeIntervalType(testScalarType)) {
+     if (isDateIntervalType(testScalarType)) {
        testScalarType = "long";
      }
 
@@ -2346,19 +2676,19 @@ public class GenVectorCode extends Task {
     generateScalarArithmeticOperatorColumn(tdesc, returnType, className);
   }
 
-  private void generateFilterDecimalColumnCompareScalar(String[] tdesc) throws IOException {
+  private void generateFilterDecimalColumnCompareDecimalScalar(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
     String className = "FilterDecimalCol" + operatorName + "DecimalScalar";
     generateDecimalColumnCompare(tdesc, className);
   }
 
-  private void generateFilterDecimalScalarCompareColumn(String[] tdesc) throws IOException {
+  private void generateFilterDecimalScalarCompareDecimalColumn(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
     String className = "FilterDecimalScalar" + operatorName + "DecimalColumn";
     generateDecimalColumnCompare(tdesc, className);
   }
 
-  private void generateFilterDecimalColumnCompareColumn(String[] tdesc) throws IOException {
+  private void generateFilterDecimalColumnCompareDecimalColumn(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
     String className = "FilterDecimalCol" + operatorName + "DecimalColumn";
     generateDecimalColumnCompare(tdesc, className);
@@ -2444,310 +2774,283 @@ public class GenVectorCode extends Task {
         className, templateString);
   }
 
-  private void generateColumnArithmeticColumnWithConvert(String[] tdesc) throws Exception {
+  // DateColumnArithmeticIntervalYearMonthColumn.txt
+  // DateScalarArithmeticIntervalYearMonthColumn.txt
+  // DateColumnArithmeticIntervalYearMonthScalar.txt
+  //
+  // IntervalYearMonthColumnArithmeticDateColumn.txt
+  // IntervalYearMonthScalarArithmeticDateColumn.txt
+  // IntervalYearMonthColumnArithmeticDateScalar.txt
+  //
+  // TimestampColumnArithmeticIntervalYearMonthColumn.txt
+  // TimestampScalarArithmeticIntervalYearMonthColumn.txt
+  // TimestampColumnArithmeticIntervalYearMonthScalar.txt
+  //
+  // IntervalYearMonthColumnArithmeticTimestampColumn.txt
+  // IntervalYearMonthScalarArithmeticTimestampColumn.txt
+  // IntervalYearMonthColumnArithmeticTimestampScalar.txt
+  //
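+  // For example, a descriptor row like
+  // {"DateArithmeticIntervalYearMonth", "Add", "+", "date", "Col", "interval_year_month", "Column"}
+  // would expand DateColumnArithmeticIntervalYearMonthColumn.txt into the class
+  // DateColAddIntervalYearMonthColumn.
+  //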
+  private void generateDateTimeArithmeticIntervalYearMonth(String[] tdesc) throws Exception {
     String operatorName = tdesc[1];
-    String operandType1 = tdesc[2];
-    String operandType2 = tdesc[3];
-    String operatorSymbol = tdesc[4];
-    String typeConversion1 = tdesc[5];
-    String typeConversion2 = tdesc[6];
-    String className = getCamelCaseType(operandType1)
-        + "Col" + operatorName + getCamelCaseType(operandType2) + "Column";
-    String returnType = getArithmeticReturnType(operandType1, operandType2);
-    String outputColumnVectorType = this.getColumnVectorType(returnType);
-    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
-    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
-    // For date/timestamp/interval, this should be "long"
-    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
-    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
-    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+    String operatorSymbol = tdesc[2];
+    String operandType1 = tdesc[3];
+    String colOrScalar1 = tdesc[4];
+    String operandType2 = tdesc[5];
+    String colOrScalar2 = tdesc[6];
+    String className = getCamelCaseType(operandType1) + colOrScalar1 + operatorName +
+        getCamelCaseType(operandType2) + colOrScalar2;
 
     //Read the template into a string;
-    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String fileName = getCamelCaseType(operandType1) + (colOrScalar1.equals("Col") ? "Column" : colOrScalar1) + "Arithmetic" +
+        getCamelCaseType(operandType2) + colOrScalar2;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, fileName + ".txt"));
     String templateString = readFile(templateFile);
     templateString = templateString.replaceAll("<ClassName>", className);
-    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
-    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
-    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
-    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
-    templateString = templateString.replaceAll("<OperandType1>", operandType1);
-    templateString = templateString.replaceAll("<OperandType2>", operandType2);
-    templateString = templateString.replaceAll("<ReturnType>", returnType);
-    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
-    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
-    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
-    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
-    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
-    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    templateString = templateString.replaceAll("<OperatorMethod>", operatorName.toLowerCase());
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
 
-    testCodeGen.addColumnColumnOperationTestCases(
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+
+    if (colOrScalar1.equals("Col") && colOrScalar2.equals("Column")) {
+      testCodeGen.addColumnColumnOperationTestCases(
+            className,
+            inputColumnVectorType1,
+            inputColumnVectorType2,
+            "long");
+    } else if (colOrScalar1.equals("Col") && colOrScalar2.equals("Scalar")) {
+      String testScalarType = operandType2;
+      if (isDateIntervalType(testScalarType)) {
+        testScalarType = "long";
+

<TRUNCATED>

[17/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt
deleted file mode 100644
index 353e849..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt
+++ /dev/null
@@ -1,445 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-
-/**
- * Generated from template FilterDecimalColumnCompareColumn.txt, which covers binary comparison 
- * filter expressions between two columns. Output is not produced in a separate column. 
- * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum1;
-  private int colNum2;
-
-  public <ClassName>(int colNum1, int colNum2) { 
-    this.colNum1 = colNum1;
-    this.colNum2 = colNum2;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    DecimalColumnVector inputColVector1 = (DecimalColumnVector) batch.cols[colNum1];
-    DecimalColumnVector inputColVector2 = (DecimalColumnVector) batch.cols[colNum2];
-    int[] sel = batch.selected;
-    boolean[] nullPos1 = inputColVector1.isNull;
-    boolean[] nullPos2 = inputColVector2.isNull;
-    int n = batch.size;
-    HiveDecimalWritable[] vector1 = inputColVector1.vector;
-    HiveDecimalWritable[] vector2 = inputColVector2.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    // handle case where neither input has nulls
-    if (inputColVector1.noNulls && inputColVector2.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-
-        /* Either all must remain selected or all will be eliminated.
-         * Repeating property will not change.
-         */
-        if (!(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0;
-        }
-      } else if (inputColVector1.isRepeating) {
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        if (newSize < batch.size) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-
-    // handle case where only input 2 has nulls
-    } else if (inputColVector1.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos2[0] ||
-            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0; 
-        } 
-      } else if (inputColVector1.isRepeating) {
-
-         // no need to check for nulls in input 1
-         if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (nullPos2[0]) {
-
-          // no values will qualify because every comparison will be with NULL
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      }
-
-    // handle case where only input 1 has nulls
-    } else if (inputColVector2.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos1[0] ||
-            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0; 
-          return;
-        } 
-      } else if (inputColVector1.isRepeating) {
-        if (nullPos1[0]) {
-
-          // if repeating value is null then every comparison will fail so nothing qualifies
-          batch.size = 0;
-          return; 
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-         if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-         if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      }
-
-    // handle case where both inputs have nulls
-    } else {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos1[0] || nullPos2[0] ||
-            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0; 
-        } 
-      } else if (inputColVector1.isRepeating) {
-         if (nullPos1[0]) {
-           batch.size = 0;
-           return;
-         }
-         if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (nullPos2[0]) {
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-         if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i] && !nullPos2[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i] && !nullPos2[i]) {
-              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } 
-    }
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return -1;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalColumn.txt
new file mode 100644
index 0000000..a2352c6
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalColumn.txt
@@ -0,0 +1,445 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+/**
+ * Generated from template FilterDecimalColumnCompareDecimalColumn.txt, which covers binary
+ * comparison filter expressions between two decimal columns. Output is not produced in a
+ * separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    DecimalColumnVector inputColVector1 = (DecimalColumnVector) batch.cols[colNum1];
+    DecimalColumnVector inputColVector2 = (DecimalColumnVector) batch.cols[colNum2];
+    int[] sel = batch.selected;
+    boolean[] nullPos1 = inputColVector1.isNull;
+    boolean[] nullPos2 = inputColVector2.isNull;
+    int n = batch.size;
+    HiveDecimalWritable[] vector1 = inputColVector1.vector;
+    HiveDecimalWritable[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // handle case where neither input has nulls
+    if (inputColVector1.noNulls && inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+
+        /* Either all must remain selected or all will be eliminated.
+         * Repeating property will not change.
+         */
+        if (!(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+
+    // handle case where only input 2 has nulls
+    } else if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos2[0] ||
+            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+
+        // no need to check for nulls in input 1
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+
+          // no values will qualify because every comparison will be with NULL
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where only input 1 has nulls
+    } else if (inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] ||
+            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+          return;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (nullPos1[0]) {
+
+          // if repeating value is null then every comparison will fail so nothing qualifies
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where both inputs have nulls
+    } else {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] || nullPos2[0] ||
+            !(vector1[0].compareTo(vector2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (nullPos1[0]) {
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (vector1[0].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (vector1[i].compareTo(vector2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (vector1[i].compareTo(vector2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

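A note on the pattern above: every branch of evaluate() ends in the same selection-compaction idiom, so the bulk of the template is null/repeating case analysis rather than filtering logic. Below is a minimal, self-contained sketch of that idiom (plain Java for illustration only; this is not Hive code and the names are invented):

    // Keeps the indices of qualifying rows in sel[] and returns the new
    // row count; mirrors the loops in the generated filter classes.
    public final class SelectionCompactionSketch {

      static int compact(int[] sel, int n, boolean selectedInUse, boolean[] pass) {
        int newSize = 0;
        if (selectedInUse) {
          // walk only the rows already selected by an upstream filter
          for (int j = 0; j != n; j++) {
            int i = sel[j];
            if (pass[i]) {
              sel[newSize++] = i;
            }
          }
        } else {
          // dense batch: row i is implicitly selected; shrinking turns sel[] on
          for (int i = 0; i != n; i++) {
            if (pass[i]) {
              sel[newSize++] = i;
            }
          }
        }
        return newSize;  // caller stores this in batch.size
      }

      public static void main(String[] args) {
        int[] sel = new int[4];
        boolean[] pass = {true, false, true, false};
        System.out.println(compact(sel, 4, false, pass) + " rows kept");  // 2 rows kept
      }
    }

The one asymmetry in the real template is the dense case: batch.selectedInUse flips to true only when newSize < batch.size, so an all-pass batch stays dense.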
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalScalar.txt
new file mode 100644
index 0000000..bdd39b9
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareDecimalScalar.txt
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+
+/**
+ * This is a generated class to evaluate a <OperatorSymbol> comparison between a vector of
+ * decimal values and a decimal scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveDecimal value;
+
+  public <ClassName>(int colNum, HiveDecimal value) {
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+    DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[colNum];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+    HiveDecimalWritable[] vector = inputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
+
+          // Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
+
+            // Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+           if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
+             sel[newSize++] = i;
+           }
+          }
+        }
+
+        // Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

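At build time the code generator replaces <ClassName> and <OperatorSymbol> with concrete values, producing one class per operator. For illustration only (a hypothetical expansion, with the operator chosen arbitrarily), the repeating fast path with <OperatorSymbol> = "<" becomes roughly:

    // One comparison decides the whole batch, since a repeating
    // vector carries a single value for every row.
    if (inputColVector.isRepeating) {
      if (!(DecimalUtil.compare(vector[0], value) < 0)) {
        batch.size = 0;  // entire batch is filtered out
      }
    }

This is why the repeating branches never touch sel[]: either all rows survive or none do, and the repeating property of the batch is preserved.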
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt
deleted file mode 100644
index bdd39b9..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt
+++ /dev/null
@@ -1,160 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-
-/**
- * This is a generated class to evaluate a <OperatorSymbol> comparison on a vector of decimal
- * values.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private HiveDecimal value;
-
-  public <ClassName>(int colNum, HiveDecimal value) {
-    this.colNum = colNum;
-    this.value = value;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-    DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[colNum];
-    int[] sel = batch.selected;
-    boolean[] nullPos = inputColVector.isNull;
-    int n = batch.size;
-    HiveDecimalWritable[] vector = inputColVector.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector.noNulls) {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
-
-          // Entire batch is filtered out.
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    } else {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!nullPos[0]) {
-          if (!(DecimalUtil.compare(vector[0], value) <OperatorSymbol> 0)) {
-
-            // Entire batch is filtered out.
-            batch.size = 0;
-          }
-        } else {
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (!nullPos[i]) {
-           if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
-             sel[newSize++] = i;
-           }
-          }
-        }
-
-        // Change the selected vector
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (!nullPos[i]) {
-            if (DecimalUtil.compare(vector[i], value) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    }
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return -1;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
deleted file mode 100644
index 0608016..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
+++ /dev/null
@@ -1,160 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-
-/**
- * This is a generated class to evaluate a <OperatorSymbol> comparison on a vector of decimal
- * values.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private HiveDecimal value;
-
-  public <ClassName>(HiveDecimal value, int colNum) {
-    this.colNum = colNum;
-    this.value = value;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-    DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[colNum];
-    int[] sel = batch.selected;
-    boolean[] nullPos = inputColVector.isNull;
-    int n = batch.size;
-    HiveDecimalWritable[] vector = inputColVector.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector.noNulls) {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!(DecimalUtil.compare(value, vector[0]) <OperatorSymbol> 0)) {
-
-          // Entire batch is filtered out.
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    } else {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!nullPos[0]) {
-          if (!(DecimalUtil.compare(value, vector[0]) <OperatorSymbol> 0)) {
-
-            // Entire batch is filtered out.
-            batch.size = 0;
-          }
-        } else {
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (!nullPos[i]) {
-           if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
-             sel[newSize++] = i;
-           }
-          }
-        }
-
-        // Change the selected vector
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (!nullPos[i]) {
-            if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    }
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return -1;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
-            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareDecimalColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareDecimalColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareDecimalColumn.txt
new file mode 100644
index 0000000..0608016
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareDecimalColumn.txt
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+
+/**
+ * This is a generated class to evaluate a <OperatorSymbol> comparison between a decimal scalar
+ * and a vector of decimal values.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveDecimal value;
+
+  public <ClassName>(HiveDecimal value, int colNum) {
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+    DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[colNum];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+    HiveDecimalWritable[] vector = inputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!(DecimalUtil.compare(value, vector[0]) <OperatorSymbol> 0)) {
+
+          // Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(DecimalUtil.compare(value, vector[0]) <OperatorSymbol> 0)) {
+
+            // Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+           if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
+             sel[newSize++] = i;
+           }
+          }
+        }
+
+        // Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (DecimalUtil.compare(value, vector[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"),
+            VectorExpressionDescriptor.ArgumentType.getType("decimal"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

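This scalar-on-the-left template is the mirror image of the column-compare-scalar template above: the generated code evaluates value <OperatorSymbol> vector[i], so a predicate like 10 < dec_col and its flipped form dec_col > 10 are served by different generated classes. A hedged usage sketch follows; the class name is hypothetical (it assumes the Filter...Scalar...Column naming pattern), as is the column index:

    // Keeps rows of column 0 for which 10 < column value, filtering in place.
    HiveDecimal ten = HiveDecimal.create("10");
    VectorExpression expr = new FilterDecimalScalarLessDecimalColumn(ten, 0);
    expr.evaluate(batch);  // batch.selected and batch.size are updated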
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt
new file mode 100644
index 0000000..57caf7e
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampColumn.txt
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterLongDoubleColumnCompareTimestampColumn.txt, which covers binary
+ * comparison filter expressions between a long/double column and a timestamp column. Output is
+ * not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    TimestampColumnVector inputColVector2 = (TimestampColumnVector) batch.cols[colNum2];
+    int[] sel = batch.selected;
+    boolean[] nullPos1 = inputColVector1.isNull;
+    boolean[] nullPos2 = inputColVector2.isNull;
+    int n = batch.size;
+    <OperandType>[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // filter rows with NULL on left input
+    int newSize;
+    newSize = NullUtil.filterNulls(batch.cols[colNum1], batch.selectedInUse, sel, n);
+    if (newSize < n) {
+      n = batch.size = newSize;
+      batch.selectedInUse = true;
+    }
+
+    // filter rows with NULL on right input
+    newSize = NullUtil.filterNulls(batch.cols[colNum2], batch.selectedInUse, sel, n);
+    if (newSize < n) {
+      n = batch.size = newSize;
+      batch.selectedInUse = true;
+    }
+
+    // All rows with nulls have been filtered out, so just do normal filter for non-null case
+    if (n != 0 && inputColVector1.isRepeating && inputColVector2.isRepeating) {
+
+      // All must be selected otherwise size would be zero
+      // Repeating property will not change.
+      if (!(vector1[0] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(0))) {
+        batch.size = 0;
+      }
+    } else if (inputColVector1.isRepeating) {
+      <OperandType> value1 = vector1[0];
+      if (batch.selectedInUse) {
+        newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (value1 <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i)) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (value1 <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i)) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <OperandType> value2 = inputColVector2.<GetTimestampLongDoubleMethod>(0);
+      if (batch.selectedInUse) {
+        newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (vector1[i] <OperatorSymbol> value2) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (vector1[i] <OperatorSymbol> value2) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else if (batch.selectedInUse) {
+      newSize = 0;
+      for(int j = 0; j != n; j++) {
+        int i = sel[j];
+        if (vector1[i] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i)) {
+          sel[newSize++] = i;
+        }
+      }
+      batch.size = newSize;
+    } else {
+      newSize = 0;
+      for(int i = 0; i != n; i++) {
+        if (vector1[i] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i)) {
+          sel[newSize++] = i;
+        }
+      }
+      if (newSize < batch.size) {
+        batch.size = newSize;
+        batch.selectedInUse = true;
+      }
+    }
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

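Unlike the decimal templates above, this one does not branch on every noNulls/isRepeating combination: it first strips null rows from both inputs with NullUtil.filterNulls and then runs a null-free comparison. A sketch of the contract the template relies on, inferred from the call sites (an assumption, not the actual Hive implementation):

    // Compacts sel[] down to the rows whose value is non-null and
    // returns the surviving row count; noNulls vectors pass through.
    static int filterNullsSketch(boolean[] isNull, boolean noNulls, boolean isRepeating,
        boolean selectedInUse, int[] sel, int n) {
      if (noNulls) {
        return n;                      // nothing to drop
      }
      if (isRepeating) {
        return isNull[0] ? 0 : n;      // repeating vector: all rows or none
      }
      int newSize = 0;
      if (selectedInUse) {
        for (int j = 0; j != n; j++) {
          int i = sel[j];
          if (!isNull[i]) {
            sel[newSize++] = i;
          }
        }
      } else {
        for (int i = 0; i != n; i++) {
          if (!isNull[i]) {
            sel[newSize++] = i;
          }
        }
      }
      return newSize;
    }

After the two passes, every surviving row is non-null on both sides, which is what lets the comparison loops that follow skip the isNull checks entirely.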
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt
new file mode 100644
index 0000000..1b86691
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleColumnCompareTimestampScalar.txt
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterLongDoubleColumnCompareTimestampScalar.txt, which covers binary
+ * comparison filter expressions between a long/double column and a timestamp scalar. Output is
+ * not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  private static final long serialVersionUID = 1L;
+
+  public <ClassName>(int colNum, Timestamp value) {
+    super(colNum, TimestampColumnVector.<GetTimestampLongDoubleMethod>(value));
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

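The scalar variant above is deliberately thin: the Timestamp is converted to a long or double once, in the constructor, and evaluate() is inherited from the long/double filter named by <BaseClassName>. For illustration only (the class name is hypothetical, and it is an assumption that <GetTimestampLongDoubleMethod> resolves to a static double conversion such as getDouble):

    // Hypothetical expansion: the per-row loop in the inherited filter
    // stays a primitive double comparison; no timestamp work per row.
    public FilterDoubleColLessTimestampScalar(int colNum, Timestamp value) {
      super(colNum, TimestampColumnVector.getDouble(value));  // assumed accessor name
    }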
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt
new file mode 100644
index 0000000..f5f59c2
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterLongDoubleScalarCompareTimestampColumn.txt
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterLongDoubleScalarCompareTimestampColumn.txt, which covers
+ * comparison expressions between a long/double scalar and a timestamp column. Output is not
+ * produced in a separate column. The selected vector of the input {@link VectorizedRowBatch} is updated
+ * for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType> value;
+
+  public <ClassName>(<OperandType> value, int colNum) {
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+        // All must be selected, otherwise size would be zero.
+        // Repeating property will not change.
+        if (!(value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(0))) {
+          // Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i)) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i)) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(0))) {
+            //Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            if (value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i)) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        //Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i)) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
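
The evaluate() body above is the standard in-place filter pattern: rows that pass the predicate are compacted into batch.selected, and selectedInUse flips on only when something was actually filtered. A minimal standalone sketch of the same pattern, with plain arrays standing in for Hive's VectorizedRowBatch (all names here are illustrative, not Hive API):

// Minimal sketch of the in-place selected-vector filter used by the generated
// expressions; plain arrays stand in for the batch fields of the same names.
public class SelectedVectorFilterSketch {
  static int size;               // batch.size
  static boolean selectedInUse;  // batch.selectedInUse
  static int[] sel;              // batch.selected

  // Keep rows where value < column[i], mirroring "scalar <OperatorSymbol> column".
  static void filterScalarLessThanColumn(long value, long[] column) {
    int n = size;
    if (n == 0) return;
    if (selectedInUse) {
      int newSize = 0;
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        if (value < column[i]) sel[newSize++] = i;
      }
      size = newSize;
    } else {
      int newSize = 0;
      for (int i = 0; i != n; i++) {
        if (value < column[i]) sel[newSize++] = i;
      }
      if (newSize < n) {   // only flip to selected mode if something was filtered
        size = newSize;
        selectedInUse = true;
      }
    }
  }

  public static void main(String[] args) {
    long[] col = {5, 1, 9, 3};
    sel = new int[col.length];
    size = col.length;
    selectedInUse = false;
    filterScalarLessThanColumn(4, col);        // keeps rows where 4 < col[i]
    System.out.println(size + " rows selected");  // 2 rows: indexes 0 and 2
  }
}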

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterScalarCompareTimestampColumn.txt
deleted file mode 100644
index e0e5022..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterScalarCompareTimestampColumn.txt
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * Generated from template FilterScalarCompareTimestampColumn.txt, which covers comparison 
- * expressions between a long or double scalar and a column, however output is not produced in a separate column. 
- * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
- * Note: For timestamp and long or double we implicitly interpret the long as the number
- * of seconds or double as seconds and fraction since the epoch.
- */
-public class <ClassName> extends <BaseClassName> {
-
-  public <ClassName>(<OperandType> value, int colNum) { 
-    super(TimestampUtils.<TimestampScalarConversion>(value), colNum);
-  }
-
-  public <ClassName>() {
-    super();
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt
new file mode 100644
index 0000000..4298d79
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnBetween.txt
@@ -0,0 +1,171 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterTimestampColumnBetween.txt, which covers [NOT] BETWEEN filter
+ * expressions where a column is [NOT] between one scalar and another.
+ * Output is not produced in a separate column.  The selected vector of the input
+ * {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+
+  // The comparison is of the form "column BETWEEN leftValue AND rightValue"
+  private Timestamp leftValue;
+  private Timestamp rightValue;
+  private Timestamp scratchValue;
+
+  public <ClassName>(int colNum, Timestamp leftValue, Timestamp rightValue) {
+    this.colNum = colNum;
+    this.leftValue = leftValue;
+    this.rightValue = rightValue;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero.
+        // Repeating property will not change.
+        if (<OptionalNot>(inputColVector.compareTo(0, leftValue) < 0 || inputColVector.compareTo(0, rightValue) > 0)) {
+
+          // Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (<OptionalNot>(inputColVector.compareTo(leftValue, i) <= 0 && inputColVector.compareTo(i, rightValue) <= 0)) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (<OptionalNot>(inputColVector.compareTo(leftValue, i) <= 0 && inputColVector.compareTo(i, rightValue) <= 0)) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero.
+        // Repeating property will not change.
+        if (!nullPos[0]) {
+          if (<OptionalNot>(inputColVector.compareTo(0, leftValue) < 0 || inputColVector.compareTo(0, rightValue) > 0)) {
+
+            // Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            if (<OptionalNot>(inputColVector.compareTo(leftValue, i) <= 0 && inputColVector.compareTo(i, rightValue) <= 0)) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+
+        // Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (<OptionalNot>(inputColVector.compareTo(leftValue, i) <= 0 && inputColVector.compareTo(i, rightValue) <= 0)) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
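
The BETWEEN template reduces each row to two Timestamp comparisons: keep row i when leftValue <= ts[i] and ts[i] <= rightValue, negating the whole test for NOT BETWEEN. A JDK-only sketch of that predicate, assuming nothing beyond java.sql.Timestamp:

import java.sql.Timestamp;

// Standalone sketch of the [NOT] BETWEEN predicate the template generates:
// keep a row when left <= ts && ts <= right; NOT BETWEEN negates the test.
public class TimestampBetweenSketch {
  static boolean between(Timestamp ts, Timestamp left, Timestamp right) {
    return left.compareTo(ts) <= 0 && ts.compareTo(right) <= 0;
  }

  public static void main(String[] args) {
    Timestamp left  = Timestamp.valueOf("1976-05-06 00:00:00");
    Timestamp right = Timestamp.valueOf("2007-02-09 23:59:59");
    Timestamp ts    = Timestamp.valueOf("1998-10-16 20:05:29.397591987");
    System.out.println(between(ts, left, right));   // true
    System.out.println(!between(ts, left, right));  // NOT BETWEEN -> false
  }
}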

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleColumn.txt
new file mode 100644
index 0000000..d10be96
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleColumn.txt
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterTimestampColumnCompareLongDoubleColumn.txt, which covers binary
+ * comparison expressions between a timestamp column and a long/double column. Output is not produced
+ * in a separate column; the selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector1 = (TimestampColumnVector) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    int[] sel = batch.selected;
+    boolean[] nullPos1 = inputColVector1.isNull;
+    boolean[] nullPos2 = inputColVector2.isNull;
+    int n = batch.size;
+    <OperandType>[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // filter rows with NULL on left input
+    int newSize;
+    newSize = NullUtil.filterNulls(batch.cols[colNum1], batch.selectedInUse, sel, n);
+    if (newSize < n) {
+      n = batch.size = newSize;
+      batch.selectedInUse = true;
+    }
+
+    // filter rows with NULL on right input
+    newSize = NullUtil.filterNulls(batch.cols[colNum2], batch.selectedInUse, sel, n);
+    if (newSize < n) {
+      n = batch.size = newSize;
+      batch.selectedInUse = true;
+    }
+
+    // All rows with nulls have been filtered out, so just do normal filter for non-null case
+    if (n != 0 && inputColVector1.isRepeating && inputColVector2.isRepeating) {
+
+      // All must be selected otherwise size would be zero
+      // Repeating property will not change.
+      if (!(inputColVector1.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> vector2[0])) {
+        batch.size = 0;
+      }
+    } else if (inputColVector1.isRepeating) {
+      <OperandType> value1 = inputColVector1.<GetTimestampLongDoubleMethod>(0);
+      if (batch.selectedInUse) {
+        newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (value1 <OperatorSymbol> vector2[i]) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (value1 <OperatorSymbol> vector2[i]) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <OperandType> value2 = vector2[0];
+      if (batch.selectedInUse) {
+        newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value2) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value2) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else if (batch.selectedInUse) {
+      newSize = 0;
+      for(int j = 0; j != n; j++) {
+        int i = sel[j];
+        if (inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> vector2[i]) {
+          sel[newSize++] = i;
+        }
+      }
+      batch.size = newSize;
+    } else {
+      newSize = 0;
+      for(int i = 0; i != n; i++) {
+        if (inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> vector2[i]) {
+          sel[newSize++] = i;
+        }
+      }
+      if (newSize < batch.size) {
+        batch.size = newSize;
+        batch.selectedInUse = true;
+      }
+    }
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
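
Unlike the scalar templates, this column-column filter handles nulls up front: NullUtil.filterNulls compacts the selected vector to drop rows that are null on either side, so the comparison loops need no per-row null checks. A standalone sketch of that two-phase approach (plain arrays, not Hive's NullUtil):

// Sketch of the "filter nulls first, then compare unconditionally" approach:
// compact sel[] to drop null rows (what NullUtil.filterNulls does), then run
// the comparison loop with no per-row null checks. Plain arrays, not Hive API.
public class NullsFirstFilterSketch {
  static int filterNulls(boolean[] isNull, int[] sel, int n) {
    int newSize = 0;
    for (int j = 0; j != n; j++) {
      int i = sel[j];
      if (!isNull[i]) sel[newSize++] = i;
    }
    return newSize;
  }

  public static void main(String[] args) {
    boolean[] null1 = {false, true, false, false};
    boolean[] null2 = {false, false, true, false};
    long[] v1 = {1, 2, 3, 4};
    long[] v2 = {0, 9, 9, 4};
    int[] sel = {0, 1, 2, 3};
    int n = 4;
    n = filterNulls(null1, sel, n);       // drops row 1
    n = filterNulls(null2, sel, n);       // drops row 2
    int newSize = 0;
    for (int j = 0; j != n; j++) {        // now compare with no null checks
      int i = sel[j];
      if (v1[i] > v2[i]) sel[newSize++] = i;
    }
    System.out.println(newSize + " rows pass");  // row 0 only (1 > 0)
  }
}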


[18/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
index e8dc21b..b891e27 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
@@ -18,12 +18,16 @@
 package org.apache.hadoop.hive.common.type;
 
 import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.util.Date;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.hive.common.util.DateUtils;
+import org.apache.hive.common.util.IntervalDayTimeUtils;
+
+import sun.util.calendar.BaseCalendar;
 
 /**
  * Day-time interval type representing an offset in days/hours/minutes/seconds,
@@ -85,15 +89,23 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
   }
 
   /**
+   *
+   * @return double representation of the interval day time, accurate to nanoseconds
+   */
+  public double getDouble() {
+    return totalSeconds + nanos / 1000000000.0;
+  }
+
+  /**
    * Ensures that the seconds and nanoseconds fields have consistent sign
    */
   protected void normalizeSecondsAndNanos() {
     if (totalSeconds > 0 && nanos < 0) {
       --totalSeconds;
-      nanos += DateUtils.NANOS_PER_SEC;
+      nanos += IntervalDayTimeUtils.NANOS_PER_SEC;
     } else if (totalSeconds < 0 && nanos > 0) {
       ++totalSeconds;
-      nanos -= DateUtils.NANOS_PER_SEC;
+      nanos -= IntervalDayTimeUtils.NANOS_PER_SEC;
     }
   }
 
@@ -103,7 +115,7 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
     totalSeconds += TimeUnit.HOURS.toSeconds(hours);
     totalSeconds += TimeUnit.MINUTES.toSeconds(minutes);
     totalSeconds += TimeUnit.NANOSECONDS.toSeconds(nanos);
-    nanos = nanos % DateUtils.NANOS_PER_SEC;
+    nanos = nanos % IntervalDayTimeUtils.NANOS_PER_SEC;
 
     this.totalSeconds = totalSeconds;
     this.nanos = nanos;
@@ -120,7 +132,7 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
   public void set(BigDecimal totalSecondsBd) {
     long totalSeconds = totalSecondsBd.longValue();
     BigDecimal fractionalSecs = totalSecondsBd.remainder(BigDecimal.ONE);
-    int nanos = fractionalSecs.multiply(DateUtils.NANOS_PER_SEC_BD).intValue();
+    int nanos = fractionalSecs.multiply(IntervalDayTimeUtils.NANOS_PER_SEC_BD).intValue();
     set(totalSeconds, nanos);
   }
 
@@ -155,6 +167,13 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
     return 0 == compareTo((HiveIntervalDayTime) obj);
   }
 
+  /**
+   * Return a copy of this object.
+   */
+  public Object clone() {
+    return new HiveIntervalDayTime(totalSeconds, nanos);
+  }
+
   @Override
   public int hashCode() {
     return new HashCodeBuilder().append(totalSeconds).append(nanos).toHashCode();
@@ -190,23 +209,23 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
           sign = -1;
         }
         int days = sign *
-            DateUtils.parseNumericValueWithRange("day", patternMatcher.group(2),
+            IntervalDayTimeUtils.parseNumericValueWithRange("day", patternMatcher.group(2),
                 0, Integer.MAX_VALUE);
         byte hours = (byte) (sign *
-            DateUtils.parseNumericValueWithRange("hour", patternMatcher.group(3), 0, 23));
+            IntervalDayTimeUtils.parseNumericValueWithRange("hour", patternMatcher.group(3), 0, 23));
         byte minutes = (byte) (sign *
-            DateUtils.parseNumericValueWithRange("minute", patternMatcher.group(4), 0, 59));
+            IntervalDayTimeUtils.parseNumericValueWithRange("minute", patternMatcher.group(4), 0, 59));
         int seconds = 0;
         int nanos = 0;
         field = patternMatcher.group(5);
         if (field != null) {
           BigDecimal bdSeconds = new BigDecimal(field);
-          if (bdSeconds.compareTo(DateUtils.MAX_INT_BD) > 0) {
+          if (bdSeconds.compareTo(IntervalDayTimeUtils.MAX_INT_BD) > 0) {
             throw new IllegalArgumentException("seconds value of " + bdSeconds + " too large");
           }
           seconds = sign * bdSeconds.intValue();
           nanos = sign * bdSeconds.subtract(new BigDecimal(bdSeconds.toBigInteger()))
-              .multiply(DateUtils.NANOS_PER_SEC_BD).intValue();
+              .multiply(IntervalDayTimeUtils.NANOS_PER_SEC_BD).intValue();
         }
 
         result = new HiveIntervalDayTime(days, hours, minutes, seconds, nanos);
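
Two of the hunks above are easy to get wrong by eye: getDouble() must divide the nanos field as a double or the fraction is silently truncated by integer division, and normalizeSecondsAndNanos() shifts one whole second between fields when the signs disagree. A small worked example of both:

// Worked example of the interval arithmetic above. Shows why the nanos field
// must be divided as a double (1e9), and how sign normalization moves one
// second between the fields when seconds and nanos disagree in sign.
public class IntervalArithmeticSketch {
  public static void main(String[] args) {
    long totalSeconds = 5;
    int nanos = 500_000_000;                                 // half a second
    double asDouble = totalSeconds + nanos / 1000000000.0;   // 5.5, not 5.0
    System.out.println(asDouble);

    // Sign normalization: 5 s and -200,000,000 ns -> 4 s and +800,000,000 ns.
    long secs = 5; int ns = -200_000_000;
    if (secs > 0 && ns < 0) { --secs; ns += 1_000_000_000; }
    System.out.println(secs + " s " + ns + " ns");           // 4 s 800000000 ns
  }
}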

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/common/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java b/common/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
new file mode 100644
index 0000000..3fb0cfd
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+public class RandomTypeUtil {
+
+  public static final long NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
+  public static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
+  public static final long NANOSECONDS_PER_MILLISSECOND = TimeUnit.MILLISECONDS.toNanos(1);
+
+  private static ThreadLocal<DateFormat> DATE_FORMAT =
+      new ThreadLocal<DateFormat>() {
+        @Override
+        protected DateFormat initialValue() {
+          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        }
+      };
+
+  // We've switched to Joda/Java Calendar, which has a more limited time range.
+  public static int MIN_YEAR = 1900;
+  public static int MAX_YEAR = 3000;
+  private static long MIN_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("1900-01-01 00:00:00");
+  private static long MAX_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("3000-01-01 00:00:00");
+
+  private static long parseToMillis(String s) {
+    try {
+      return DATE_FORMAT.get().parse(s).getTime();
+    } catch (ParseException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  public static Timestamp getRandTimestamp(Random r) {
+    return getRandTimestamp(r, MIN_YEAR, MAX_YEAR);
+  }
+
+  public static Timestamp getRandTimestamp(Random r, int minYear, int maxYear) {
+    String optionalNanos = "";
+    switch (r.nextInt(4)) {
+    case 0:
+      // No nanos.
+      break;
+    case 1:
+      optionalNanos = String.format(".%09d",
+          Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_SECOND)));
+      break;
+    case 2:
+      // Limit to milliseconds only...
+      optionalNanos = String.format(".%09d",
+          Integer.valueOf(r.nextInt((int) MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISSECOND);
+      break;
+    case 3:
+      // Limit to below milliseconds only...
+      optionalNanos = String.format(".%09d",
+          Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISSECOND)));
+      break;
+    }
+    String timestampStr = String.format("%04d-%02d-%02d %02d:%02d:%02d%s",
+        Integer.valueOf(minYear + r.nextInt(maxYear - minYear + 1)),  // year
+        Integer.valueOf(1 + r.nextInt(12)),      // month
+        Integer.valueOf(1 + r.nextInt(28)),      // day
+        Integer.valueOf(0 + r.nextInt(24)),      // hour
+        Integer.valueOf(0 + r.nextInt(60)),      // minute
+        Integer.valueOf(0 + r.nextInt(60)),      // second
+        optionalNanos);
+    Timestamp timestampVal;
+    try {
+      timestampVal = Timestamp.valueOf(timestampStr);
+    } catch (Exception e) {
+      System.err.println("Timestamp string " + timestampStr + " did not parse");
+      throw e;
+    }
+    return timestampVal;
+  }
+
+  public static long randomMillis(long minMillis, long maxMillis, Random rand) {
+    return minMillis + (long) ((maxMillis - minMillis) * rand.nextDouble());
+  }
+
+  public static long randomMillis(Random rand) {
+    return randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
+  }
+
+  public static int randomNanos(Random rand, int decimalDigits) {
+    // Only keep the most significant decimalDigits digits.
+    int nanos = rand.nextInt((int) NANOSECONDS_PER_SECOND);
+    return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
+  }
+
+  public static int randomNanos(Random rand) {
+    return randomNanos(rand, 9);
+  }
+}
\ No newline at end of file
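
The randomNanos(rand, decimalDigits) helper keeps only the most significant decimalDigits digits of a 9-digit nanosecond value by subtracting the remainder modulo 10^(9 - decimalDigits). A worked example of that truncation:

// Worked example of the digit-truncation arithmetic in randomNanos():
// keeping the top `decimalDigits` digits of a 9-digit nano value.
public class RandomNanosSketch {
  static int truncate(int nanos, int decimalDigits) {
    return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
  }

  public static void main(String[] args) {
    int nanos = 123_456_789;
    System.out.println(truncate(nanos, 3));  // 123000000 -- millisecond precision
    System.out.println(truncate(nanos, 6));  // 123456000 -- microsecond precision
    System.out.println(truncate(nanos, 9));  // 123456789 -- full nanoseconds
  }
}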

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/common/src/java/org/apache/hive/common/util/DateUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/DateUtils.java b/common/src/java/org/apache/hive/common/util/DateUtils.java
index c749bcb..c5a1c50 100644
--- a/common/src/java/org/apache/hive/common/util/DateUtils.java
+++ b/common/src/java/org/apache/hive/common/util/DateUtils.java
@@ -56,21 +56,4 @@ public class DateUtils {
     }
     return result;
   }
-
-  public static long getIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime) {
-    return intervalDayTime.getTotalSeconds() * NANOS_PER_SEC + intervalDayTime.getNanos();
-  }
-
-  public static void setIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime,
-      long totalNanos) {
-    intervalDayTime.set(totalNanos / NANOS_PER_SEC, (int) (totalNanos % NANOS_PER_SEC));
-  }
-
-  public static long getIntervalDayTimeTotalSecondsFromTotalNanos(long totalNanos) {
-    return totalNanos / NANOS_PER_SEC;
-  }
-
-  public static int getIntervalDayTimeNanosFromTotalNanos(long totalNanos) {
-    return (int) (totalNanos % NANOS_PER_SEC);
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/common/src/java/org/apache/hive/common/util/IntervalDayTimeUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/IntervalDayTimeUtils.java b/common/src/java/org/apache/hive/common/util/IntervalDayTimeUtils.java
new file mode 100644
index 0000000..727c1e6
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/util/IntervalDayTimeUtils.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
+
+/**
+ * IntervalDayTimeUtils. Thread-safe helpers for interval day-time values.
+ *
+ */
+public class IntervalDayTimeUtils {
+
+  private static final ThreadLocal<SimpleDateFormat> dateFormatLocal = new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      return new SimpleDateFormat("yyyy-MM-dd");
+    }
+  };
+
+  public static SimpleDateFormat getDateFormat() {
+    return dateFormatLocal.get();
+  }
+
+  public static final int NANOS_PER_SEC = 1000000000;
+  public static final BigDecimal MAX_INT_BD = new BigDecimal(Integer.MAX_VALUE);
+  public static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(NANOS_PER_SEC);
+
+  public static int parseNumericValueWithRange(String fieldName,
+      String strVal, int minValue, int maxValue) throws IllegalArgumentException {
+    int result = 0;
+    if (strVal != null) {
+      result = Integer.parseInt(strVal);
+      if (result < minValue || result > maxValue) {
+        throw new IllegalArgumentException(String.format("%s value %d outside range [%d, %d]",
+            fieldName, result, minValue, maxValue));
+      }
+    }
+    return result;
+  }
+
+  public static long getIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime) {
+    return intervalDayTime.getTotalSeconds() * NANOS_PER_SEC + intervalDayTime.getNanos();
+  }
+
+  public static void setIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime,
+      long totalNanos) {
+    intervalDayTime.set(totalNanos / NANOS_PER_SEC, (int) (totalNanos % NANOS_PER_SEC));
+  }
+
+  public static long getIntervalDayTimeTotalSecondsFromTotalNanos(long totalNanos) {
+    return totalNanos / NANOS_PER_SEC;
+  }
+
+  public static int getIntervalDayTimeNanosFromTotalNanos(long totalNanos) {
+    return (int) (totalNanos % NANOS_PER_SEC);
+  }
+}
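
The relocated helpers pack a day-time interval into a single long of total nanoseconds and unpack it again; because Java long division truncates toward zero, the seconds and nanos recovered from a negative total keep the consistent signs that normalizeSecondsAndNanos expects. A round-trip sketch using the same arithmetic on plain longs:

// Round-trip sketch of the total-nanos packing used by IntervalDayTimeUtils:
// seconds/nanos -> single long -> seconds/nanos. Same arithmetic, plain longs.
public class TotalNanosSketch {
  static final long NANOS_PER_SEC = 1_000_000_000L;

  public static void main(String[] args) {
    long totalSeconds = 93_784;        // 1 day 2:03:04
    int nanos = 5_000_000;             // .005 s
    long totalNanos = totalSeconds * NANOS_PER_SEC + nanos;   // pack
    long secondsBack = totalNanos / NANOS_PER_SEC;            // unpack
    int nanosBack = (int) (totalNanos % NANOS_PER_SEC);
    System.out.println(secondsBack == totalSeconds && nanosBack == nanos);  // true
  }
}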

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/data/files/timestamps.txt
----------------------------------------------------------------------
diff --git a/data/files/timestamps.txt b/data/files/timestamps.txt
new file mode 100644
index 0000000..36ffd23
--- /dev/null
+++ b/data/files/timestamps.txt
@@ -0,0 +1,50 @@
+6631-11-13 16:31:29.702202248
+6731-02-12 08:12:48.287783702
+6705-09-28 18:27:28.000845672
+5397-07-13 07:12:32.000896438
+9209-11-11 04:08:58.223768453
+9403-01-09 18:12:33.547
+6482-04-27 12:07:38.073915413
+7503-06-23 23:14:17.486
+1883-04-17 04:14:34.647766229
+0004-09-22 18:26:29.519542222
+7160-12-02 06:00:24.81200852
+8422-07-22 03:21:45.745036084
+4143-07-08 10:53:27.252802259
+5344-10-04 18:40:08.165
+5966-07-09 03:30:50.597
+9075-06-13 16:20:09.218517797
+1815-05-06 00:12:37.543584705
+7409-09-07 23:33:32.459349602
+5339-02-01 14:10:01.085678691
+4966-12-04 09:30:55.202
+1319-02-02 16:31:57.778
+1404-07-23 15:32:16.059185026
+6229-06-28 02:54:28.970117179
+0528-10-27 08:15:18.941718273
+8521-01-16 20:42:05.668832388
+1976-05-06 00:42:30.910786948
+2003-09-23 22:33:17.00003252
+2007-02-09 05:17:29.368756876
+1998-10-16 20:05:29.397591987
+1976-03-03 04:54:33.000895162
+1985-07-20 09:30:11.0
+2021-09-24 03:18:32.413655165
+2013-04-07 02:44:43.00086821
+2002-05-10 05:29:48.990818073
+1973-04-17 06:30:38.596784156
+1987-02-21 19:48:29.0
+1981-11-15 23:03:10.999338387
+2000-12-18 08:42:30.000595596
+1999-10-03 16:59:10.396903939
+2024-11-11 16:42:41.101
+2013-04-10 00:43:46.854731546
+2010-04-08 02:43:35.861742727
+2004-03-07 20:14:13.0
+1987-05-28 13:52:07.900916635
+1978-08-05 14:41:05.501
+1966-08-16 13:36:50.183618031
+2009-01-21 10:49:07.108
+1981-04-25 09:01:12.077192689
+1985-11-18 16:37:54.0
+1974-10-04 17:21:03.989

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 3229c44..2d9cab8 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -263,6 +263,8 @@ minitez.query.files.shared=acid_globallimit.q,\
   vector_inner_join.q,\
   vector_interval_1.q,\
   vector_interval_2.q,\
+  vector_interval_arithmetic.q,\
+  vector_interval_mapjoin.q,\
   vector_join30.q,\
   vector_join_filters.q,\
   vector_join_nulls.q,\
@@ -287,6 +289,7 @@ minitez.query.files.shared=acid_globallimit.q,\
   vector_partitioned_date_time.q,\
   vector_reduce_groupby_decimal.q,\
   vector_string_concat.q,\
+  vectorized_timestamp.q,\
   vector_varchar_4.q,\
   vector_varchar_mapjoin1.q,\
   vector_varchar_simple.q,\
@@ -333,6 +336,7 @@ minitez.query.files.shared=acid_globallimit.q,\
   vectorized_shufflejoin.q,\
   vectorized_string_funcs.q,\
   vectorized_timestamp_funcs.q,\
+  vectorized_timestamp_ints_casts.q,\
   auto_sortmerge_join_1.q,\
   auto_sortmerge_join_10.q,\
   auto_sortmerge_join_11.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
index f2ec645..fe8f535 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
@@ -34,6 +34,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  */
 public class <ClassName> extends LongCol<OperatorName>LongColumn {
 
+  private static final long serialVersionUID = 1L;
+
   public <ClassName>(int colNum1, int colNum2, int outputColumn) {
     super(colNum1, colNum2, outputColumn);
   }
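
This hunk, and the matching ones in the DTI templates below, pin an explicit serialVersionUID on the generated classes. Since vector expressions are Serializable and get shipped in query plans, an explicit ID keeps previously serialized instances readable if fields are added later. A minimal illustrative round trip (a toy class, not Hive code):

import java.io.*;

// Sketch of why the templates pin serialVersionUID: with an explicit value,
// the class can evolve without breaking deserialization of older instances.
public class SerialVersionSketch implements Serializable {
  private static final long serialVersionUID = 1L;
  private int colNum = 3;

  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(bos);
    oos.writeObject(new SerialVersionSketch());
    oos.flush();
    Object back = new ObjectInputStream(
        new ByteArrayInputStream(bos.toByteArray())).readObject();
    System.out.println(back.getClass().getSimpleName() + " round-tripped");
  }
}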

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
index 1a360b8..293369f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
@@ -29,6 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  */
 public class <ClassName> extends LongCol<OperatorName>LongScalar {
 
+  private static final long serialVersionUID = 1L;
+
   public <ClassName>(int colNum, long value, int outputColumn) {
     super(colNum, value, outputColumn);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
index 9d692cb..60884cd 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
@@ -29,6 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  */
 public class <ClassName> extends <BaseClassName> {
 
+  private static final long serialVersionUID = 1L;
+
   public <ClassName>(int colNum, long value, int outputColumn) {
     super(colNum, value, outputColumn);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
index 753ea71..04607f6 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
@@ -34,6 +34,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  */
 public class <ClassName> extends LongScalar<OperatorName>LongColumn {
 
+  private static final long serialVersionUID = 1L;
+
   public <ClassName>(long value, int colNum, int outputColumn) {
     super(value, colNum, outputColumn);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
index fdd453a..d518c44 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
@@ -34,6 +34,8 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  */
 public class <ClassName> extends <BaseClassName> {
 
+  private static final long serialVersionUID = 1L;
+
   public <ClassName>(long value, int colNum, int outputColumn) {
     super(value, colNum, outputColumn);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
new file mode 100644
index 0000000..c3d8d7e
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
+ * expressions between a date column and an interval year-month column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private Date scratchDate1;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchDate1 = new Date(0);
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type interval_year_month.
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+
+    // Output is type date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          scratchDate1, scratchIntervalYearMonth2,  outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+    } else if (inputColVector1.isRepeating) {
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
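
The NullUtil.setNullDataEntriesLong call above enforces the convention described in the comment: arithmetic runs over every row, nulls included, and null positions in the output are then overwritten with 1 (NaN for doubles) so a downstream consumer such as col2 / (col1 - 1) cannot hit a zero divide at a null row. A standalone sketch of that convention:

// Sketch of the null-slot convention: arithmetic runs over every row (nulls
// included), then null positions in the *output* are overwritten with 1 so a
// consumer like col2 / (col1 - 1) cannot divide by zero at a null row.
public class NullDataEntriesSketch {
  public static void main(String[] args) {
    long[] col1 = {3, 1, 6};
    boolean[] isNull = {false, true, false};                 // row 1 is null
    long[] diff = new long[3];
    for (int i = 0; i < 3; i++) diff[i] = col1[i] - 1;       // row 1 computes 0
    for (int i = 0; i < 3; i++) if (isNull[i]) diff[i] = 1;  // setNullDataEntriesLong analog
    long[] col2 = {10, 10, 10};
    for (int i = 0; i < 3; i++) {
      System.out.println(col2[i] / diff[i]);  // safe; row 1's result is null anyway
    }
  }
}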

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
new file mode 100644
index 0000000..d1474fb
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
+ * expressions between a date column and an interval year-month scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveIntervalYearMonth value;
+  private int outputColumn;
+  private Date scratchDate1;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new HiveIntervalYearMonth((int) value);
+    this.outputColumn = outputColumn;
+    scratchDate1 = new Date(0);
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchDate1, value, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
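
Both date templates rely on dates traveling through the vectorized path as epoch days in a LongColumnVector, converting days to milliseconds and back around the math. A JDK-only round-trip sketch of that representation (DateWritable itself also applies timezone adjustment, which this UTC-based sketch omits):

import java.sql.Date;
import java.util.concurrent.TimeUnit;

// JDK-only sketch of the epoch-days representation the templates rely on:
// the LongColumnVector holds days since 1970-01-01, converted to millis for
// the java.sql.Date math and back to days for the output vector.
public class EpochDaysSketch {
  public static void main(String[] args) {
    long days = 11_016;                                   // 2000-02-29 in UTC
    long millis = TimeUnit.DAYS.toMillis(days);
    Date d = new Date(millis);                            // toString uses local zone
    long daysBack = TimeUnit.MILLISECONDS.toDays(d.getTime());
    System.out.println(d + " -> " + daysBack);            // round-trips to 11016
  }
}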

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..63cebaf
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateColumnArithmeticTimestampColumn.txt, a class
+ * which covers binary arithmetic expressions between a date column and timestamp column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type Date (days).  For the math we convert it to a timestamp.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+    } else if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <HiveOperandType2> value2 = inputColVector2.asScratch<CamelOperandType2>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
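
The template above, like the others in this patch, specializes its inner loop on isRepeating and batch.selectedInUse. A condensed sketch of that idiom in isolation (class name invented, plain long addition standing in for the dtm.<OperatorMethod> call, null propagation elided):

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Invented demo class: adds cols[in1] and cols[in2] into cols[out],
// branching on isRepeating/selectedInUse the way the generated code does.
public class ColColAddSketch {
  public static void evaluate(VectorizedRowBatch batch, int in1, int in2, int out) {
    LongColumnVector a = (LongColumnVector) batch.cols[in1];
    LongColumnVector b = (LongColumnVector) batch.cols[in2];
    LongColumnVector r = (LongColumnVector) batch.cols[out];
    int[] sel = batch.selected;
    int n = batch.size;
    if (n == 0) {
      return;
    }
    r.isRepeating = a.isRepeating && b.isRepeating;
    if (a.isRepeating && b.isRepeating) {
      r.vector[0] = a.vector[0] + b.vector[0];       // one result covers the whole batch
    } else if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {                 // only the selected rows are live
        int i = sel[j];
        r.vector[i] = a.vector[a.isRepeating ? 0 : i] + b.vector[b.isRepeating ? 0 : i];
      }
    } else {
      for (int i = 0; i != n; i++) {                 // dense batch: rows 0..n-1
        r.vector[i] = a.vector[a.isRepeating ? 0 : i] + b.vector[b.isRepeating ? 0 : i];
      }
    }
  }
}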

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
new file mode 100644
index 0000000..7aee529
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateColumnArithmeticTimestampScalar.txt, a class
+ * which covers binary arithmetic expressions between a date column and a timestamp scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType2> value;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, <HiveOperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date (days).  For the math we convert it to a timestamp.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
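
The column-scalar templates all share the null recipe visible above: the scalar side can never be null, so the output null flags are simply copied from the input column. A hypothetical distillation (invented class name, long addition in place of the date math):

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Invented demo class: the column-scalar null-propagation recipe, condensed.
public class ColScalarNullRecipeSketch {
  public static void evaluate(VectorizedRowBatch batch, int in, int out, long scalar) {
    LongColumnVector input = (LongColumnVector) batch.cols[in];
    LongColumnVector output = (LongColumnVector) batch.cols[out];
    int[] sel = batch.selected;
    int n = batch.size;
    if (n == 0) {
      return;
    }
    output.noNulls = input.noNulls;
    output.isRepeating = input.isRepeating;
    if (input.isRepeating) {
      output.vector[0] = input.vector[0] + scalar;
      output.isNull[0] = input.isNull[0];            // entry 0 is always copied
    } else if (input.noNulls) {
      // No flags to copy; the real templates also honor sel here, and
      // writing unselected rows is harmless since downstream never reads them.
      for (int i = 0; i != n; i++) {
        output.vector[i] = input.vector[i] + scalar;
      }
    } else if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {                 // copy flags row by row
        int i = sel[j];
        output.vector[i] = input.vector[i] + scalar;
        output.isNull[i] = input.isNull[i];
      }
    } else {
      for (int i = 0; i != n; i++) {
        output.vector[i] = input.vector[i] + scalar;
      }
      System.arraycopy(input.isNull, 0, output.isNull, 0, n);  // bulk copy
    }
  }
}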

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
new file mode 100644
index 0000000..c68ac34
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateScalarArithmeticIntervalYearMonthColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Date value;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new Date(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Evaluates the scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type Interval_Year_Month (months).
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type Date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          value, scratchIntervalYearMonth2, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
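
Each generated class advertises the argument shapes it handles through getDescriptor(), and the vectorizer matches these descriptors when it picks an expression class. A standalone illustration using the same builder calls as the template above (only the wrapping main() is invented):

import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;

public class DescriptorSketch {
  public static void main(String[] args) {
    // Matches: scalar date on the left, interval_year_month column on the right.
    VectorExpressionDescriptor.Descriptor d = new VectorExpressionDescriptor.Builder()
        .setMode(VectorExpressionDescriptor.Mode.PROJECTION)
        .setNumArguments(2)
        .setArgumentTypes(
            VectorExpressionDescriptor.ArgumentType.getType("date"),
            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
        .setInputExpressionTypes(
            VectorExpressionDescriptor.InputExpressionType.SCALAR,
            VectorExpressionDescriptor.InputExpressionType.COLUMN)
        .build();
    System.out.println(d);
  }
}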

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..cb6b750
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template DateScalarArithmeticTimestampColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    // Scalar input #1 is type date (days).  For the math we convert it to a timestamp.
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  /**
+   * Evaluates the scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      dtm.<OperatorMethod>(
+          value, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
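
One design point worth noting in the constructor above: the date scalar arrives as epoch days and is converted to a Timestamp once, up front, so evaluate() performs no per-row conversion on the scalar side. A toy sketch of that trade (names invented):

import java.sql.Timestamp;

import org.apache.hadoop.hive.serde2.io.DateWritable;

// Invented demo class: convert the date scalar to a Timestamp once, in the
// constructor, rather than once per row inside evaluate().
public class ScalarConvertOnceSketch {
  private final Timestamp value;

  public ScalarConvertOnceSketch(long scalarDays) {
    value = new Timestamp(DateWritable.daysToMillis((int) scalarDays));
  }

  public Timestamp value() {
    return value;
  }

  public static void main(String[] args) {
    System.out.println(new ScalarConvertOnceSketch(16436).value()); // 2015-01-01 00:00:00.0
  }
}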

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
deleted file mode 100644
index cd7a1e7..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template DateTimeColumnArithmeticIntervalColumnWithConvert.txt, which covers binary arithmetic 
- * expressions between columns.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-  
-  private int colNum1;
-  private int colNum2;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
-    this.colNum1 = colNum1;
-    this.colNum2 = colNum2;
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
-    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    int n = batch.size;
-    <VectorOperandType1>[] vector1 = inputColVector1.vector;
-    <VectorOperandType2>[] vector2 = inputColVector2.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-    
-    outputColVector.isRepeating = 
-         inputColVector1.isRepeating && inputColVector2.isRepeating
-      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
-      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
-    
-    // Handle nulls first  
-    NullUtil.propagateNullsColCol(
-      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
-          
-    /* Disregard nulls for processing. In other words,
-     * the arithmetic operation is performed even if one or 
-     * more inputs are null. This is to improve speed by avoiding
-     * conditional checks in the inner loop.
-     */ 
-    if (inputColVector1.isRepeating && inputColVector2.isRepeating) { 
-      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[0]);
-    } else if (inputColVector1.isRepeating) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
-        }
-      }
-    } else if (inputColVector2.isRepeating) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
-        }
-      }
-    } else {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
-        }
-      }
-    }
-    
-    /* For the case when the output can have null values, follow 
-     * the convention that the data values must be 1 for long and 
-     * NaN for double. This is to prevent possible later zero-divide errors
-     * in complex arithmetic expressions like col2 / (col1 - 1)
-     * in the case when some col1 entries are null.
-     */
-    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum1() {
-    return colNum1;
-  }
-
-  public void setColNum1(int colNum1) {
-    this.colNum1 = colNum1;
-  }
-
-  public int getColNum2() {
-    return colNum2;
-  }
-
-  public void setColNum2(int colNum2) {
-    this.colNum2 = colNum2;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-  
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}
-
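
The *WithConvert templates deleted in this patch (the one above and the two that follow) belong to the retired encoding in which timestamps and day-time intervals were carried as a single long of nanoseconds (see the removed TimestampUtils.getTimeNanoSec and DateUtils.getIntervalDayTimeTotalNanos calls later in this commit). As background, and purely as an illustration not taken from the patch itself, one hard limit of that encoding is its range: a signed 64-bit nanosecond count reaches only to roughly the year 2262.

import java.sql.Timestamp;

// Standalone arithmetic only: shows the range ceiling of a
// one-long-of-nanoseconds timestamp representation.
public class NanosRangeSketch {
  public static void main(String[] args) {
    long maxMillis = Long.MAX_VALUE / 1_000_000L;   // nanos -> millis
    System.out.println(new Timestamp(maxMillis));   // roughly the year 2262
  }
}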

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
deleted file mode 100644
index abee249..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
+++ /dev/null
@@ -1,152 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
-import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template ColumnArithmeticScalarWithConvert.txt, which covers binary arithmetic 
- * expressions between a column and a scalar.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private <VectorOperandType2> value;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(int colNum, <VectorOperandType2> value, int outputColumn) {
-    this.colNum = colNum;
-    this.value = value;
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
-    boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
-    int n = batch.size;
-    <VectorOperandType1>[] vector = inputColVector.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector.isRepeating) {
-      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector[0]), <OperatorSymbol> (int) value);
-      
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
-    } else if (inputColVector.noNulls) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-      }
-    } else /* there are nulls */ {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-          outputIsNull[i] = inputIsNull[i];
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
-      }
-    }
-    
-    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-  
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum() {
-    return colNum;
-  }
-  
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public <VectorOperandType2> getValue() {
-    return value;
-  }
-
-  public void setValue(<VectorOperandType2> value) {
-    this.value = value;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
deleted file mode 100644
index 93a441a..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
+++ /dev/null
@@ -1,165 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-
-
-/*
- * Because of the templatized nature of the code, either or both
- * of these ColumnVector imports may be needed. Listing both of them
- * rather than using ....vectorization.*;
- */
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template DateTimeScalarArithmeticIntervalColumnWithConvert.txt.
- * Implements a vectorized arithmetic operator with a scalar on the left and a
- * column vector on the right. The result is output to an output column vector.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private <VectorOperandType1> value;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(<VectorOperandType1> value, int colNum, int outputColumn) {
-    this.colNum = colNum;
-    this.value = <TypeConversionToMillis>(value);
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  /**
-   * Method to evaluate scalar-column operation in vectorized fashion.
-   *
-   * @batch a package of rows with each column stored in a vector
-   */
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
-    boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
-    int n = batch.size;
-    <VectorOperandType2>[] vector = inputColVector.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector.isRepeating) {
-      outputVector[0] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[0]);
-      
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
-    } else if (inputColVector.noNulls) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-      }
-    } else {                         /* there are nulls */ 
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-          outputIsNull[i] = inputIsNull[i];
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
-      }
-    }
-    
-    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-  
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public <VectorOperandType1> getValue() {
-    return value;
-  }
-
-  public void setValue(<VectorOperandType1> value) {
-    this.value = value;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
index 55193ac..2351230 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
@@ -18,8 +18,6 @@
  
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**


[12/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 43c7f3d..0ce1660 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -53,20 +53,27 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
 import org.apache.hadoop.hive.ql.exec.vector.AggregateDefinition;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFAvgDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFAvgTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCount;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountMerge;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFCountStar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFStdPopTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFStdSampTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFSumDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFVarPopTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorUDAFVarSampTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFAvgLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopLong;
@@ -933,20 +940,16 @@ public class VectorizationContext {
     case DATE:
       return new ConstantVectorExpression(outCol, DateWritable.dateToDays((Date) constantValue));
     case TIMESTAMP:
-      return new ConstantVectorExpression(outCol, TimestampUtils.getTimeNanoSec((Timestamp) constantValue));
+      return new ConstantVectorExpression(outCol, (Timestamp) constantValue);
     case INTERVAL_YEAR_MONTH:
       return new ConstantVectorExpression(outCol,
           ((HiveIntervalYearMonth) constantValue).getTotalMonths());
     case INTERVAL_DAY_TIME:
-      return new ConstantVectorExpression(outCol,
-          DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) constantValue));
+      return new ConstantVectorExpression(outCol, (HiveIntervalDayTime) constantValue);
     case FLOAT_FAMILY:
       return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue());
     case DECIMAL:
-      VectorExpression ve = new ConstantVectorExpression(outCol, (HiveDecimal) constantValue);
-      // Set type name with decimal precision, scale, etc.
-      ve.setOutputType(typeName);
-      return ve;
+      return new ConstantVectorExpression(outCol, (HiveDecimal) constantValue, typeName);
     case STRING:
       return new ConstantVectorExpression(outCol, ((String) constantValue).getBytes());
     case CHAR:
@@ -1243,8 +1246,8 @@ public class VectorizationContext {
     VectorExpression ve = getVectorExpressionForUdf(udf, udf.getClass(), childExpr, mode, returnType);
 
     // Replace with the milliseconds conversion
-    if (!udf.isIntToTimestampInSeconds() && ve instanceof CastLongToTimestampViaLongToLong) {
-      ve = createVectorExpression(CastMillisecondsLongToTimestampViaLongToLong.class,
+    if (!udf.isIntToTimestampInSeconds() && ve instanceof CastLongToTimestamp) {
+      ve = createVectorExpression(CastMillisecondsLongToTimestamp.class,
           childExpr, Mode.PROJECTION, returnType);
     }
 
@@ -1529,13 +1532,13 @@ public class VectorizationContext {
       expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, returnType);
       ((ILongInExpr) expr).setInListValues(inVals);
     } else if (isTimestampFamily(colType)) {
-      cl = (mode == Mode.FILTER ? FilterLongColumnInList.class : LongColumnInList.class);
-      long[] inVals = new long[childrenForInList.size()];
+      cl = (mode == Mode.FILTER ? FilterTimestampColumnInList.class : TimestampColumnInList.class);
+      Timestamp[] inVals = new Timestamp[childrenForInList.size()];
       for (int i = 0; i != inVals.length; i++) {
         inVals[i] = getTimestampScalar(childrenForInList.get(i));
       }
       expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, returnType);
-      ((ILongInExpr) expr).setInListValues(inVals);
+      ((ITimestampInExpr) expr).setInListValues(inVals);
     } else if (isStringFamily(colType)) {
       cl = (mode == Mode.FILTER ? FilterStringColumnInList.class : StringColumnInList.class);
       byte[][] inVals = new byte[childrenForInList.size()][];
@@ -1835,7 +1838,7 @@ public class VectorizationContext {
     if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToDouble.class, childExpr, Mode.PROJECTION, returnType);
     } else if (inputType.equals("timestamp")) {
-      return createVectorExpression(CastTimestampToDoubleViaLongToDouble.class, childExpr, Mode.PROJECTION,
+      return createVectorExpression(CastTimestampToDouble.class, childExpr, Mode.PROJECTION,
           returnType);
     } else if (isFloatFamily(inputType)) {
 
@@ -1979,20 +1982,10 @@ public class VectorizationContext {
       cl = FilterCharColumnBetween.class;
     } else if (charTypePattern.matcher(colType).matches() && notKeywordPresent) {
       cl = FilterCharColumnNotBetween.class;
-    } else if (colType.equals("timestamp")) {
-
-      // Get timestamp boundary values as longs instead of the expected strings
-      long left = getTimestampScalar(childExpr.get(2));
-      long right = getTimestampScalar(childExpr.get(3));
-      childrenAfterNot = new ArrayList<ExprNodeDesc>();
-      childrenAfterNot.add(colExpr);
-      childrenAfterNot.add(new ExprNodeConstantDesc(left));
-      childrenAfterNot.add(new ExprNodeConstantDesc(right));
-      if (notKeywordPresent) {
-        cl = FilterLongColumnNotBetween.class;
-      } else {
-        cl = FilterLongColumnBetween.class;
-      }
+    } else if (colType.equals("timestamp") && !notKeywordPresent) {
+      cl = FilterTimestampColumnBetween.class;
+    } else if (colType.equals("timestamp") && notKeywordPresent) {
+      cl = FilterTimestampColumnNotBetween.class;
     } else if (isDecimalFamily(colType) && !notKeywordPresent) {
       cl = FilterDecimalColumnBetween.class;
     } else if (isDecimalFamily(colType) && notKeywordPresent) {
@@ -2062,6 +2055,7 @@ public class VectorizationContext {
 
     // Make vectorized operator
     String normalizedName = getNormalizedName(resultTypeName);
+
     VectorExpression ve = new VectorUDFAdaptor(expr, outputCol, normalizedName, argDescs);
 
     // Set child expressions
@@ -2179,21 +2173,17 @@ public class VectorizationContext {
     VectorExpression.Type type = VectorExpression.Type.getValue(t);
     Object scalarValue = getScalarValue(constDesc);
     switch (type) {
-      case TIMESTAMP:
-        return TimestampUtils.getTimeNanoSec((Timestamp) scalarValue);
       case DATE:
         return DateWritable.dateToDays((Date) scalarValue);
       case INTERVAL_YEAR_MONTH:
         return ((HiveIntervalYearMonth) scalarValue).getTotalMonths();
-      case INTERVAL_DAY_TIME:
-        return DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) scalarValue);
       default:
         return scalarValue;
     }
   }
 
-  // Get a timestamp as a long in number of nanos, from a string constant or cast
-  private long getTimestampScalar(ExprNodeDesc expr) throws HiveException {
+  // Get a timestamp from a string constant or cast
+  private Timestamp getTimestampScalar(ExprNodeDesc expr) throws HiveException {
     if (expr instanceof ExprNodeGenericFuncDesc &&
         ((ExprNodeGenericFuncDesc) expr).getGenericUDF() instanceof GenericUDFTimestamp) {
       return evaluateCastToTimestamp(expr);
@@ -2221,7 +2211,7 @@ public class VectorizationContext {
         + "Expecting string.");
   }
 
-  private long evaluateCastToTimestamp(ExprNodeDesc expr) throws HiveException {
+  private Timestamp evaluateCastToTimestamp(ExprNodeDesc expr) throws HiveException {
     ExprNodeGenericFuncDesc expr2 = (ExprNodeGenericFuncDesc) expr;
     ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr2);
     ObjectInspector output = evaluator.initialize(null);
@@ -2232,7 +2222,7 @@ public class VectorizationContext {
       throw new HiveException("Udf: failed to convert to timestamp");
     }
     Timestamp ts = (Timestamp) java;
-    return TimestampUtils.getTimeNanoSec(ts);
+    return ts;
   }
 
   private Constructor<?> getConstructor(Class<?> cl) throws HiveException {
@@ -2321,7 +2311,7 @@ public class VectorizationContext {
     }
   }
 
-  public static ColumnVector.Type getColumnVectorTypeFromTypeInfo(TypeInfo typeInfo) throws HiveException {
+  public static ColumnVector.Type getColumnVectorTypeFromTypeInfo(TypeInfo typeInfo) {
     switch (typeInfo.getCategory()) {
       case STRUCT:
         return Type.STRUCT;
@@ -2342,11 +2332,15 @@ public class VectorizationContext {
           case INT:
           case LONG:
           case DATE:
-          case TIMESTAMP:
           case INTERVAL_YEAR_MONTH:
-          case INTERVAL_DAY_TIME:
             return ColumnVector.Type.LONG;
 
+          case TIMESTAMP:
+            return ColumnVector.Type.TIMESTAMP;
+
+          case INTERVAL_DAY_TIME:
+            return ColumnVector.Type.INTERVAL_DAY_TIME;
+
           case FLOAT:
           case DOUBLE:
             return ColumnVector.Type.DOUBLE;
@@ -2375,47 +2369,59 @@ public class VectorizationContext {
  // TODO:   And, investigate if different reduce-side versions are needed for var* and std*, or if map-side aggregate can be used.  Right now they are conservatively
   //         marked map-side (HASH).
   static ArrayList<AggregateDefinition> aggregatesDefinition = new ArrayList<AggregateDefinition>() {{
-    add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY,    null,                          VectorUDAFMinLong.class));
+    add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH,    null,                          VectorUDAFMinLong.class));
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFMinDouble.class));
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          null,                          VectorUDAFMinString.class));
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFMinDecimal.class));
-    add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY,    null,                          VectorUDAFMaxLong.class));
+    add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              null,                          VectorUDAFMinTimestamp.class));
+    add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH,    null,                          VectorUDAFMaxLong.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFMaxDouble.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          null,                          VectorUDAFMaxString.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFMaxDecimal.class));
+    add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              null,                          VectorUDAFMaxTimestamp.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.NONE,                   GroupByDesc.Mode.HASH,         VectorUDAFCountStar.class));
-    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY,    GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
+    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH,    GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.MERGEPARTIAL, VectorUDAFCountMerge.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
+    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
+    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INTERVAL_DAY_TIME,      GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             null,                          VectorUDAFSumLong.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFSumDouble.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFSumDecimal.class));
-    add(new AggregateDefinition("avg",         VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFAvgLong.class));
+    add(new AggregateDefinition("avg",         VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFAvgLong.class));
     add(new AggregateDefinition("avg",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFAvgDouble.class));
     add(new AggregateDefinition("avg",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFAvgDecimal.class));
-    add(new AggregateDefinition("variance",    VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFVarPopLong.class));
-    add(new AggregateDefinition("var_pop",     VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFVarPopLong.class));
+    add(new AggregateDefinition("avg",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFAvgTimestamp.class));
+    add(new AggregateDefinition("variance",    VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFVarPopLong.class));
+    add(new AggregateDefinition("var_pop",     VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFVarPopLong.class));
     add(new AggregateDefinition("variance",    VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFVarPopDouble.class));
     add(new AggregateDefinition("var_pop",     VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFVarPopDouble.class));
     add(new AggregateDefinition("variance",    VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFVarPopDecimal.class));
     add(new AggregateDefinition("var_pop",     VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFVarPopDecimal.class));
-    add(new AggregateDefinition("var_samp",    VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFVarSampLong.class));
+    add(new AggregateDefinition("variance",    VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFVarPopTimestamp.class));
+    add(new AggregateDefinition("var_pop",     VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFVarPopTimestamp.class));
+    add(new AggregateDefinition("var_samp",    VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFVarSampLong.class));
     add(new AggregateDefinition("var_samp" ,   VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFVarSampDouble.class));
     add(new AggregateDefinition("var_samp" ,   VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFVarSampDecimal.class));
-    add(new AggregateDefinition("std",         VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
-    add(new AggregateDefinition("stddev",      VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
-    add(new AggregateDefinition("stddev_pop",  VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
+    add(new AggregateDefinition("var_samp" ,   VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFVarSampTimestamp.class));
+    add(new AggregateDefinition("std",         VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
+    add(new AggregateDefinition("stddev",      VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
+    add(new AggregateDefinition("stddev_pop",  VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFStdPopLong.class));
     add(new AggregateDefinition("std",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFStdPopDouble.class));
     add(new AggregateDefinition("stddev",      VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFStdPopDouble.class));
     add(new AggregateDefinition("stddev_pop",  VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFStdPopDouble.class));
     add(new AggregateDefinition("std",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFStdPopDecimal.class));
     add(new AggregateDefinition("stddev",      VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFStdPopDecimal.class));
     add(new AggregateDefinition("stddev_pop",  VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFStdPopDecimal.class));
-    add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.INT_TIMESTAMP_FAMILY,   GroupByDesc.Mode.HASH,         VectorUDAFStdSampLong.class));
+    add(new AggregateDefinition("std",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFStdPopTimestamp.class));
+    add(new AggregateDefinition("stddev",      VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFStdPopTimestamp.class));
+    add(new AggregateDefinition("stddev_pop",  VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFStdPopTimestamp.class));
+    add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.HASH,         VectorUDAFStdSampLong.class));
     add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFStdSampDouble.class));
     add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFStdSampDecimal.class));
+    add(new AggregateDefinition("stddev_samp", VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFStdSampTimestamp.class));
   }};
 
   public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc, boolean isReduce)
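
The hunks above retire the convention of passing timestamps around as a single
long of nanoseconds (TimestampUtils.getTimeNanoSec) in favor of real
java.sql.Timestamp values. A minimal sketch of the retired encoding and its
round-trip, plain JDK only and illustrative rather than code from the patch;
the non-negative assumption below is exactly where the long encoding got
delicate:

    import java.sql.Timestamp;

    public class NanosEncodingDemo {
      // Old-style encoding: collapse a Timestamp into nanoseconds since the
      // epoch (assumes a post-1970 value; pre-epoch values need floor math).
      static long toNanos(Timestamp ts) {
        long seconds = ts.getTime() / 1000L;  // getTime() includes the millis
        return seconds * 1_000_000_000L + ts.getNanos();
      }

      // Round-trip back to a Timestamp.
      static Timestamp fromNanos(long nanos) {
        Timestamp ts = new Timestamp((nanos / 1_000_000_000L) * 1000L);
        ts.setNanos((int) (nanos % 1_000_000_000L));
        return ts;
      }

      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2016-04-19 12:12:55.123456789");
        System.out.println(fromNanos(toNanos(ts)).equals(ts));  // true
      }
    }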

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index d258e2d..1482855 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -138,11 +138,13 @@ public class VectorizedBatchUtil {
           case SHORT:
           case INT:
           case LONG:
-          case TIMESTAMP:
           case DATE:
           case INTERVAL_YEAR_MONTH:
-          case INTERVAL_DAY_TIME:
             return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
+          case TIMESTAMP:
+            return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
+          case INTERVAL_DAY_TIME:
+            return new IntervalDayTimeColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
           case FLOAT:
           case DOUBLE:
             return new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
@@ -390,13 +392,12 @@ public class VectorizedBatchUtil {
     }
       break;
     case TIMESTAMP: {
-      LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
+      TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
       if (writableCol != null) {
-        Timestamp t = ((TimestampWritable) writableCol).getTimestamp();
-        lcv.vector[rowIndex] = TimestampUtils.getTimeNanoSec(t);
+        lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
         lcv.isNull[rowIndex] = false;
       } else {
-        lcv.vector[rowIndex] = 1;
+        lcv.setNullValue(rowIndex);
         setNullColIsNullValue(lcv, rowIndex);
       }
     }
@@ -414,14 +415,14 @@ public class VectorizedBatchUtil {
     }
       break;
     case INTERVAL_DAY_TIME: {
-      LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
+      IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[offset + colIndex];
       if (writableCol != null) {
-        HiveIntervalDayTime i = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
-        lcv.vector[rowIndex] = DateUtils.getIntervalDayTimeTotalNanos(i);
-        lcv.isNull[rowIndex] = false;
+        HiveIntervalDayTime idt = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
+        icv.set(rowIndex, idt);
+        icv.isNull[rowIndex] = false;
       } else {
-        lcv.vector[rowIndex] = 1;
-        setNullColIsNullValue(lcv, rowIndex);
+        icv.setNullValue(rowIndex);
+        setNullColIsNullValue(icv, rowIndex);
       }
     }
       break;
@@ -580,6 +581,10 @@ public class VectorizedBatchUtil {
       return new DecimalColumnVector(decColVector.vector.length,
           decColVector.precision,
           decColVector.scale);
+    } else if (source instanceof TimestampColumnVector) {
+      return new TimestampColumnVector(((TimestampColumnVector) source).getLength());
+    } else if (source instanceof IntervalDayTimeColumnVector) {
+      return new IntervalDayTimeColumnVector(((IntervalDayTimeColumnVector) source).getLength());
     } else if (source instanceof ListColumnVector) {
       ListColumnVector src = (ListColumnVector) source;
       ColumnVector child = cloneColumnVector(src.child);
@@ -623,7 +628,8 @@ public class VectorizedBatchUtil {
         newBatch.cols[i].init();
       }
     }
-    newBatch.projectedColumns = Arrays.copyOf(batch.projectedColumns, batch.projectedColumns.length);
+    newBatch.projectedColumns = Arrays.copyOf(batch.projectedColumns,
+        batch.projectedColumns.length);
     newBatch.projectionSize = batch.projectionSize;
     newBatch.reset();
     return newBatch;
@@ -678,6 +684,13 @@ public class VectorizedBatchUtil {
             }
           } else if (colVector instanceof DecimalColumnVector) {
             sb.append(((DecimalColumnVector) colVector).vector[index].toString());
+          } else if (colVector instanceof TimestampColumnVector) {
+            Timestamp timestamp = new Timestamp(0);
+            ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
+            sb.append(timestamp.toString());
+          } else if (colVector instanceof IntervalDayTimeColumnVector) {
+            HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
+            sb.append(intervalDayTime.toString());
           } else {
             sb.append("Unknown");
           }
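
timestampUpdate() above writes the row's value into a caller-supplied mutable
Timestamp; elsewhere in this patch the same idea appears as
getScratchTimestamp()/setFromScratchTimestamp(). A toy sketch of that
mutate-in-place pattern, plain JDK and illustrative only:

    import java.sql.Timestamp;

    public class ScratchTimestampDemo {
      public static void main(String[] args) {
        long[] millis = {0L, 1_461_067_975_000L};
        Timestamp scratch = new Timestamp(0);  // allocated once, reused per row
        for (long m : millis) {
          scratch.setTime(m);                  // mutate in place, no new object
          System.out.println(scratch);
        }
      }
    }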

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index efb06b2..014f097 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -375,13 +375,13 @@ public class VectorizedRowBatchCtx {
         break;
         
         case TIMESTAMP: {
-          LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+          TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[colIndex];
           if (value == null) {
             lcv.noNulls = false;
             lcv.isNull[0] = true;
             lcv.isRepeating = true;
-          } else { 
-            lcv.fill(TimestampUtils.getTimeNanoSec((Timestamp) value));
+          } else {
+            lcv.fill((Timestamp) value);
             lcv.isNull[0] = false;
           }
         }
@@ -400,14 +400,14 @@ public class VectorizedRowBatchCtx {
         }
 
         case INTERVAL_DAY_TIME: {
-          LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+          IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[colIndex];
           if (value == null) {
-            lcv.noNulls = false;
-            lcv.isNull[0] = true;
-            lcv.isRepeating = true;
+            icv.noNulls = false;
+            icv.isNull[0] = true;
+            icv.isRepeating = true;
           } else {
-            lcv.fill(DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) value));
-            lcv.isNull[0] = false;
+            icv.fill(((HiveIntervalDayTime) value));
+            icv.isNull[0] = false;
           }
         }
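
fill() stores a batch-wide constant in the repeating form, so a
partition-constant value is written once per batch instead of once per row. A
toy sketch of the repeating-vector idea (not the real Hive classes):

    public class RepeatingVectorDemo {
      static class ToyVector {
        boolean isRepeating;
        long[] vector = new long[1024];
        void fill(long value) { vector[0] = value; isRepeating = true; }
        long get(int row) { return isRepeating ? vector[0] : vector[row]; }
      }

      public static void main(String[] args) {
        ToyVector v = new ToyVector();
        v.fill(42L);  // one write covers the whole batch
        System.out.println(v.get(0) + " " + v.get(1023));  // 42 42
      }
    }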
 

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
index a52cf19..6225ade 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
@@ -18,20 +18,23 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * Type cast decimal to timestamp. The decimal value is interpreted
  * as NNNN.DDDDDDDDD where NNNN is a number of seconds and DDDDDDDDD
  * is a number of nanoseconds.
  */
-public class CastDecimalToTimestamp extends FuncDecimalToLong {
+public class CastDecimalToTimestamp extends FuncDecimalToTimestamp {
   private static final long serialVersionUID = 1L;
 
-  private static transient HiveDecimal tenE9 = HiveDecimal.create(1000000000);
-
   public CastDecimalToTimestamp(int inputColumn, int outputColumn) {
     super(inputColumn, outputColumn);
   }
@@ -40,13 +43,7 @@ public class CastDecimalToTimestamp extends FuncDecimalToLong {
   }
 
   @Override
-  protected void func(LongColumnVector outV, DecimalColumnVector inV,  int i) {
-    HiveDecimal result = inV.vector[i].getHiveDecimal().multiply(tenE9);
-    if (result == null) {
-      outV.noNulls = false;
-      outV.isNull[i] = true;
-    } else {
-      outV.vector[i] = result.longValue();
-    }
+  protected void func(TimestampColumnVector outV, DecimalColumnVector inV,  int i) {
+    outV.set(i, TimestampWritable.decimalToTimestamp(inV.vector[i].getHiveDecimal()));
   }
 }
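
A scalar sketch of the NNNN.DDDDDDDDD interpretation described in the Javadoc
above, done with plain BigDecimal arithmetic; illustrative only, and limited
to non-negative values, while TimestampWritable.decimalToTimestamp is the
real implementation:

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    public class DecimalToTimestampDemo {
      static Timestamp decimalToTimestamp(BigDecimal d) {
        long seconds = d.longValue();                        // integer part
        int nanos = d.subtract(BigDecimal.valueOf(seconds))  // fractional part
            .movePointRight(9).intValue();                   // as nanoseconds
        Timestamp ts = new Timestamp(seconds * 1000L);
        ts.setNanos(nanos);
        return ts;
      }

      public static void main(String[] args) {
        System.out.println(decimalToTimestamp(new BigDecimal("1.123456789")));
        // 1970-01-01 00:00:01.123456789 when the JVM zone is UTC
      }
    }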

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
new file mode 100644
index 0000000..31d2f78
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class CastDoubleToTimestamp extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastDoubleToTimestamp(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastDoubleToTimestamp() {
+    super();
+  }
+
+  private void setDouble(TimestampColumnVector timestampColVector,
+      double[] vector, int elementNum) {
+    TimestampWritable.setTimestampFromDouble(
+        timestampColVector.getScratchTimestamp(), vector[elementNum]);
+    timestampColVector.setFromScratchTimestamp(elementNum);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    DoubleColumnVector inputColVector = (DoubleColumnVector) batch.cols[colNum];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    double[] vector = inputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      //All must be selected otherwise size would be zero
+      //Repeating property will not change.
+      setDouble(outputColVector, vector, 0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setDouble(outputColVector, vector, i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setDouble(outputColVector, vector, i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setDouble(outputColVector, vector, i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setDouble(outputColVector, vector, i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("double"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
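
TimestampWritable.setTimestampFromDouble itself is not shown in this patch;
assuming the usual semantics where the double counts seconds since the epoch
with the fraction as the sub-second part, a scalar sketch (illustrative,
non-negative inputs only):

    import java.sql.Timestamp;

    public class DoubleToTimestampDemo {
      static Timestamp doubleToTimestamp(double seconds) {
        long whole = (long) seconds;
        int nanos = (int) Math.round((seconds - whole) * 1e9);
        Timestamp ts = new Timestamp(whole * 1000L);
        ts.setNanos(nanos);
        return ts;
      }

      public static void main(String[] args) {
        System.out.println(doubleToTimestamp(1.5));
        // 1970-01-01 00:00:01.5 when the JVM zone is UTC
      }
    }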

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
index 32cefea..ceefd61 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
@@ -63,62 +63,6 @@ public class CastLongToDate extends VectorExpression {
     }
 
     switch (inputTypes[0]) {
-      case TIMESTAMP:
-        if (inV.noNulls) {
-          outV.noNulls = true;
-          if (inV.isRepeating) {
-            outV.isRepeating = true;
-            date.setTime(inV.vector[0] / 1000000);
-            outV.vector[0] = DateWritable.dateToDays(date);
-          } else if (batch.selectedInUse) {
-            for(int j = 0; j != n; j++) {
-              int i = sel[j];
-              date.setTime(inV.vector[i] / 1000000);
-              outV.vector[i] = DateWritable.dateToDays(date);
-            }
-            outV.isRepeating = false;
-          } else {
-            for(int i = 0; i != n; i++) {
-              date.setTime(inV.vector[i] / 1000000);
-              outV.vector[i] = DateWritable.dateToDays(date);
-            }
-            outV.isRepeating = false;
-          }
-        } else {
-
-          // Handle case with nulls. Don't do function if the value is null,
-          // because the data may be undefined for a null value.
-          outV.noNulls = false;
-          if (inV.isRepeating) {
-            outV.isRepeating = true;
-            outV.isNull[0] = inV.isNull[0];
-            if (!inV.isNull[0]) {
-              date.setTime(inV.vector[0] / 1000000);
-              outV.vector[0] = DateWritable.dateToDays(date);
-            }
-          } else if (batch.selectedInUse) {
-            for(int j = 0; j != n; j++) {
-              int i = sel[j];
-              outV.isNull[i] = inV.isNull[i];
-              if (!inV.isNull[i]) {
-                date.setTime(inV.vector[i] / 1000000);
-                outV.vector[i] = DateWritable.dateToDays(date);
-              }
-            }
-            outV.isRepeating = false;
-          } else {
-            System.arraycopy(inV.isNull, 0, outV.isNull, 0, n);
-            for(int i = 0; i != n; i++) {
-              if (!inV.isNull[i]) {
-                date.setTime(inV.vector[i] / 1000000);
-                outV.vector[i] = DateWritable.dateToDays(date);
-              }
-            }
-            outV.isRepeating = false;
-          }
-        }
-        break;
-
       case DATE:
         inV.copySelected(batch.selectedInUse, batch.selected, batch.size, outV);
         break;
@@ -155,7 +99,7 @@ public class CastLongToDate extends VectorExpression {
     b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.DATETIME_FAMILY)
+            VectorExpressionDescriptor.ArgumentType.DATE)
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN);
     return b.build();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
new file mode 100644
index 0000000..a2ee52d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class CastLongToTimestamp extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastLongToTimestamp(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastLongToTimestamp() {
+    super();
+  }
+
+  private void setSeconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ true);
+    timestampColVector.setFromScratchTimestamp(elementNum);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    LongColumnVector inputColVector = (LongColumnVector) batch.cols[colNum];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    long[] vector = inputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      //All must be selected otherwise size would be zero
+      //Repeating property will not change.
+      setSeconds(outputColVector, vector, 0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setSeconds(outputColVector, vector, i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setSeconds(outputColVector, vector, i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setSeconds(outputColVector, vector, i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setSeconds(outputColVector, vector, i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("long"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
new file mode 100644
index 0000000..01c8810
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class CastMillisecondsLongToTimestamp extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastMillisecondsLongToTimestamp(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastMillisecondsLongToTimestamp() {
+    super();
+  }
+
+  private void setMilliseconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ false);
+    timestampColVector.setFromScratchTimestamp(elementNum);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    LongColumnVector inputColVector = (LongColumnVector) batch.cols[colNum];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    long[] vector = inputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      //All must be selected otherwise size would be zero
+      //Repeating property will not change.
+      setMilliseconds(outputColVector, vector, 0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setMilliseconds(outputColVector, vector, i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setMilliseconds(outputColVector, vector, i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          setMilliseconds(outputColVector, vector, i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          setMilliseconds(outputColVector, vector, i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("long"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
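
CastLongToTimestamp and CastMillisecondsLongToTimestamp are identical except
for the intToTimestampInSeconds flag, and VectorizationContext picks between
them from the UDF's isIntToTimestampInSeconds() setting. A scalar sketch of
the two interpretations (plain JDK; illustrative):

    import java.sql.Timestamp;

    public class LongToTimestampDemo {
      static Timestamp fromSeconds(long s) { return new Timestamp(s * 1000L); }
      static Timestamp fromMillis(long m)  { return new Timestamp(m); }

      public static void main(String[] args) {
        long v = 1461067975L;
        System.out.println(fromSeconds(v));  // v as seconds: a 2016 date
        System.out.println(fromMillis(v));   // v as millis: mid-January 1970
      }
    }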

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
index 518d5d5..c8844c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -55,7 +56,7 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn];
     int[] sel = batch.selected;
     int n = batch.size;
-    LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn];
+    IntervalDayTimeColumnVector outV = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     if (n == 0) {
 
@@ -112,13 +113,13 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     }
   }
 
-  private void evaluate(LongColumnVector outV, BytesColumnVector inV, int i) {
+  private void evaluate(IntervalDayTimeColumnVector outV, BytesColumnVector inV, int i) {
     try {
       HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf(
           new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8"));
-      outV.vector[i] = DateUtils.getIntervalDayTimeTotalNanos(interval);
+      outV.set(i, interval);
     } catch (Exception e) {
-      outV.vector[i] = 1;
+      outV.setNullValue(i);
       outV.isNull[i] = true;
       outV.noNulls = false;
     }
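
On a malformed interval string the code now calls setNullValue() where it
used to write the magic value 1; both variants leave a defined value in the
null slot so later reads never see uninitialized data. The same pattern in
scalar form (illustrative):

    public class ParseOrNullDemo {
      static final long NULL_PLACEHOLDER = 1L;

      // Parse, or mark null while still storing a well-defined placeholder.
      static long parse(String s, boolean[] isNull, int i) {
        try {
          isNull[i] = false;
          return Long.parseLong(s);
        } catch (NumberFormatException e) {
          isNull[i] = true;         // the flag carries the null
          return NULL_PLACEHOLDER;  // the slot still gets a defined value
        }
      }

      public static void main(String[] args) {
        boolean[] isNull = new boolean[2];
        long[] vector = { parse("42", isNull, 0), parse("oops", isNull, 1) };
        System.out.println(vector[1] + " isNull=" + isNull[1]);  // 1 isNull=true
      }
    }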

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
new file mode 100644
index 0000000..b8a58cd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+public class CastTimestampToBoolean extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastTimestampToBoolean(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastTimestampToBoolean() {
+    super();
+  }
+
+  private int toBool(TimestampColumnVector timestampColVector, int index) {
+    return (timestampColVector.getTime(index) != 0 ||
+            timestampColVector.getNanos(index) != 0) ? 1 : 0;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      //All must be selected otherwise size would be zero
+      //Repeating property will not change.
+      outputVector[0] =  toBool(inputColVector, 0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] =  toBool(inputColVector, i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] =  toBool(inputColVector, i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] =  toBool(inputColVector, i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] =  toBool(inputColVector, i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
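
Per toBool() above, a timestamp casts to false only when both the millisecond
time and the nanos field are zero, i.e. exactly the epoch instant. A scalar
restatement, plain JDK:

    import java.sql.Timestamp;

    public class TimestampToBooleanDemo {
      static boolean toBool(Timestamp ts) {
        return ts.getTime() != 0 || ts.getNanos() != 0;
      }

      public static void main(String[] args) {
        System.out.println(toBool(new Timestamp(0)));  // false: the epoch itself
        Timestamp t = new Timestamp(0);
        t.setNanos(1);
        System.out.println(toBool(t));                 // true: 1ns later
      }
    }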

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
new file mode 100644
index 0000000..4e3e62c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * To be used to cast timestamp to date.
+ */
+public class CastTimestampToDate extends FuncTimestampToLong {
+
+  private static final long serialVersionUID = 1L;
+
+  public CastTimestampToDate() {
+    super();
+    this.outputType = "date";
+  }
+
+  public CastTimestampToDate(int inputColumn, int outputColumn) {
+    super(inputColumn, outputColumn);
+    this.outputType = "date";
+  }
+
+  @Override
+  protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
+
+    outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
+  }
+}
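
DateWritable.millisToDays is the real conversion; the essential step is a
floor division so pre-1970 timestamps land on the correct day. A UTC-only
sketch (the real method also adjusts for the local time zone):

    import java.util.concurrent.TimeUnit;

    public class MillisToDaysDemo {
      static long millisToDays(long millis) {
        // floorDiv, not '/', so -1 ms maps to day -1 (1969-12-31), not day 0
        return Math.floorDiv(millis, TimeUnit.DAYS.toMillis(1));
      }

      public static void main(String[] args) {
        System.out.println(millisToDays(0L));   // 0  -> 1970-01-01
        System.out.println(millisToDays(-1L));  // -1 -> 1969-12-31
      }
    }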

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index 0aedddc..e5bfb15 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * To be used to cast timestamp to decimal.
  */
-public class CastTimestampToDecimal extends FuncLongToDecimal {
+public class CastTimestampToDecimal extends FuncTimestampToDecimal {
 
   private static final long serialVersionUID = 1L;
 
@@ -38,12 +38,7 @@ public class CastTimestampToDecimal extends FuncLongToDecimal {
   }
 
   @Override
-  protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
-
-    // The resulting decimal value is 10e-9 * the input long value (i.e. seconds).
-    //
-    HiveDecimal result = HiveDecimal.create(inV.vector[i]);
-    result = result.scaleByPowerOfTen(-9);
-    outV.set(i, result);
+  protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
+    outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
   }
 }
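
TimestampWritable.getHiveDecimal is the real conversion; assuming it is the
inverse of the decimal-to-timestamp interpretation above (seconds.nanoseconds
as one decimal), a scalar sketch with plain BigDecimal:

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    public class TimestampToDecimalDemo {
      static BigDecimal toDecimal(Timestamp ts) {
        long seconds = Math.floorDiv(ts.getTime(), 1000L);
        return BigDecimal.valueOf(seconds)
            .add(BigDecimal.valueOf(ts.getNanos()).movePointLeft(9));
      }

      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1000L);
        ts.setNanos(123456789);
        System.out.println(toDecimal(ts));  // 1.123456789
      }
    }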

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
new file mode 100644
index 0000000..a955d79
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
+public class CastTimestampToDouble extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastTimestampToDouble(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastTimestampToDouble() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    DoubleColumnVector outputColVector = (DoubleColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    double[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      //All must be selected otherwise size would be zero
+      //Repeating property will not change.
+      outputVector[0] = inputColVector.getDouble(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] =  inputColVector.getDouble(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] =  inputColVector.getDouble(i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector.getDouble(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector.getDouble(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "double";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

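A note on the conversion above: CastTimestampToDouble delegates to TimestampColumnVector.getDouble(), whose contract (per this patch series) is epoch seconds with the nanosecond field as the fractional part. Below is a minimal, self-contained sketch of that encoding; the toDouble helper is hypothetical, written only to mirror the assumed semantics, not code from this patch:

    import java.sql.Timestamp;

    public class TimestampToDoubleSketch {
      // Hypothetical helper mirroring the assumed encoding: epoch seconds,
      // with the nanosecond field supplying the fractional part.
      static double toDouble(Timestamp ts) {
        long millis = ts.getTime();    // epoch milliseconds, sub-second included
        long seconds = millis / 1000;
        if (millis < 0 && millis % 1000 != 0) {
          seconds -= 1;                // floor instead of truncation
        }
        return seconds + ts.getNanos() / 1e9;  // getNanos() is 0..999999999
      }

      public static void main(String[] args) {
        // Prints <epoch seconds>.5 for the local time zone.
        System.out.println(toDouble(Timestamp.valueOf("2000-03-12 15:00:00.5")));
      }
    }

Flooring rather than truncating is what keeps a pre-epoch value such as 1969-12-31 23:59:59.5 at -0.5 instead of +0.5.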
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
new file mode 100644
index 0000000..ba2e823
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+public class CastTimestampToLong extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private int outputColumn;
+
+  public CastTimestampToLong(int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+  }
+
+  public CastTimestampToLong() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      this.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      // All must be selected; otherwise the batch size would be zero.
+      // The repeating property will not change.
+      outputVector[0] = inputColVector.getTimestampAsLong(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+      outputColVector.isRepeating = true;
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector.getTimestampAsLong(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector.getTimestampAsLong(i);
+        }
+      }
+      outputColVector.isRepeating = false;
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector.getTimestampAsLong(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector.getTimestampAsLong(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+      outputColVector.isRepeating = false;
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

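A hedged usage sketch for the cast above, assuming the default-size column-vector constructors and the set(int, Timestamp) method added to TimestampColumnVector elsewhere in this patch series:

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;

    public class CastTimestampToLongSketch {
      public static void main(String[] args) {
        // Two-column batch: timestamp input in column 0, bigint output in column 1.
        VectorizedRowBatch batch = new VectorizedRowBatch(2);
        TimestampColumnVector in = new TimestampColumnVector();
        in.set(0, Timestamp.valueOf("2008-10-02 00:00:00"));
        batch.cols[0] = in;
        batch.cols[1] = new LongColumnVector();
        batch.size = 1;

        new CastTimestampToLong(0, 1).evaluate(batch);
        // Whole epoch seconds for the local time zone.
        System.out.println(((LongColumnVector) batch.cols[1]).vector[0]);
      }
    }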
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 8d75cf3..8a743f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -18,10 +18,15 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 /**
  * Constant is represented as a vector with repeating values.
@@ -30,21 +35,16 @@ public class ConstantVectorExpression extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
-  private static enum Type {
-    LONG,
-    DOUBLE,
-    BYTES,
-    DECIMAL
-  }
-
   private int outputColumn;
   protected long longValue = 0;
   private double doubleValue = 0;
   private byte[] bytesValue = null;
   private HiveDecimal decimalValue = null;
+  private Timestamp timestampValue = null;
+  private HiveIntervalDayTime intervalDayTimeValue = null;
   private boolean isNullValue = false;
 
-  private Type type;
+  private ColumnVector.Type type;
   private int bytesValueLength = 0;
 
   public ConstantVectorExpression() {
@@ -82,11 +82,22 @@ public class ConstantVectorExpression extends VectorExpression {
     setBytesValue(value.getValue().getBytes());
   }
 
-  public ConstantVectorExpression(int outputColumn, HiveDecimal value) {
-    this(outputColumn, "decimal");
+  // Include type name for precision/scale.
+  public ConstantVectorExpression(int outputColumn, HiveDecimal value, String typeName) {
+    this(outputColumn, typeName);
     setDecimalValue(value);
   }
 
+  public ConstantVectorExpression(int outputColumn, Timestamp value) {
+    this(outputColumn, "timestamp");
+    setTimestampValue(value);
+  }
+
+  public ConstantVectorExpression(int outputColumn, HiveIntervalDayTime value) {
+    this(outputColumn, "interval_day_time");
+    setIntervalDayTimeValue(value);
+  }
+
   /*
    * Support for null constant object
    */
@@ -140,6 +151,28 @@ public class ConstantVectorExpression extends VectorExpression {
     }
   }
 
+  private void evaluateTimestamp(VectorizedRowBatch vrg) {
+    TimestampColumnVector dcv = (TimestampColumnVector) vrg.cols[outputColumn];
+    dcv.isRepeating = true;
+    dcv.noNulls = !isNullValue;
+    if (!isNullValue) {
+      dcv.set(0, timestampValue);
+    } else {
+      dcv.isNull[0] = true;
+    }
+  }
+
+  private void evaluateIntervalDayTime(VectorizedRowBatch vrg) {
+    IntervalDayTimeColumnVector dcv = (IntervalDayTimeColumnVector) vrg.cols[outputColumn];
+    dcv.isRepeating = true;
+    dcv.noNulls = !isNullValue;
+    if (!isNullValue) {
+      dcv.set(0, intervalDayTimeValue);
+    } else {
+      dcv.isNull[0] = true;
+    }
+  }
+
   @Override
   public void evaluate(VectorizedRowBatch vrg) {
     switch (type) {
@@ -155,6 +188,12 @@ public class ConstantVectorExpression extends VectorExpression {
     case DECIMAL:
       evaluateDecimal(vrg);
       break;
+    case TIMESTAMP:
+      evaluateTimestamp(vrg);
+      break;
+    case INTERVAL_DAY_TIME:
+      evaluateIntervalDayTime(vrg);
+      break;
     }
   }
 
@@ -192,39 +231,40 @@ public class ConstantVectorExpression extends VectorExpression {
     this.decimalValue = decimalValue;
   }
 
-  public String getTypeString() {
-    return getOutputType();
+  public HiveDecimal getDecimalValue() {
+    return decimalValue;
   }
 
-  public void setTypeString(String typeString) {
-    this.outputType = typeString;
-    if (VectorizationContext.isStringFamily(typeString)) {
-      this.type = Type.BYTES;
-    } else if (VectorizationContext.isFloatFamily(typeString)) {
-      this.type = Type.DOUBLE;
-    } else if (VectorizationContext.isDecimalFamily(typeString)){
-      this.type = Type.DECIMAL;
-    } else {
-      // everything else that does not belong to string, double, decimal is treated as long.
-      this.type = Type.LONG;
-    }
+  public void setTimestampValue(Timestamp timestampValue) {
+    this.timestampValue = timestampValue;
   }
 
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
+  public Timestamp getTimestampValue() {
+    return timestampValue;
   }
 
-  public Type getType() {
-    return type;
+  public void setIntervalDayTimeValue(HiveIntervalDayTime intervalDayTimeValue) {
+    this.intervalDayTimeValue = intervalDayTimeValue;
   }
 
-  public void setType(Type type) {
-    this.type = type;
+  public HiveIntervalDayTime getIntervalDayTimeValue() {
+    return intervalDayTimeValue;
   }
 
-  @Override
-  public void setOutputType(String type) {
-    setTypeString(type);
+  public String getTypeString() {
+    return getOutputType();
+  }
+
+  private void setTypeString(String typeString) {
+    this.outputType = typeString;
+
+    String typeName = VectorizationContext.mapTypeNameSynonyms(outputType);
+    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
+    this.type = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
   }
 
   @Override

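The two new constructors make timestamp and interval_day_time literals projectable as repeating columns. A minimal sketch of the timestamp case, assuming the TimestampColumnVector API introduced by this patch series:

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;

    public class TimestampConstantSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(1);
        batch.cols[0] = new TimestampColumnVector();
        batch.size = 3;  // any row count; the literal repeats across all rows

        new ConstantVectorExpression(0, Timestamp.valueOf("2016-04-19 12:12:55"))
            .evaluate(batch);

        TimestampColumnVector out = (TimestampColumnVector) batch.cols[0];
        // evaluateTimestamp() marks the column repeating, so entry 0 serves every row.
        System.out.println(out.isRepeating + " " + out.asScratchTimestamp(0));
      }
    }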
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
new file mode 100644
index 0000000..fafacce
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+// A type date (LongColumnVector storing epoch days) minus a type date produces a
+// type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
+public class DateColSubtractDateColumn extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private Timestamp scratchTimestamp2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public DateColSubtractDateColumn(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
+    scratchTimestamp2 = new Timestamp(0);
+  }
+
+  public DateColSubtractDateColumn() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date (epochDays).
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type date (epochDays).
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+
+    // Output is type interval_day_time.
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
+    } else if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesIntervalDayTime(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+

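For reference, a scalar sketch of what one iteration of the loops above computes, using the same daysToMillis widening. The day values are made up, and the no-argument HiveIntervalDayTime constructor is assumed to exist as the mutable scratch type the column vector hands out:

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.util.DateTimeMath;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateSubtractSketch {
      public static void main(String[] args) {
        int leftDays = 17000;   // hypothetical epoch-day values, as stored in
        int rightDays = 16990;  // the date columns' LongColumnVectors

        // Widen each date to a java.sql.Timestamp at local midnight.
        Timestamp left = new Timestamp(DateWritable.daysToMillis(leftDays));
        Timestamp right = new Timestamp(DateWritable.daysToMillis(rightDays));

        // DateTimeMath.subtract fills in the mutable result, as in the loops above.
        HiveIntervalDayTime result = new HiveIntervalDayTime();
        new DateTimeMath().subtract(left, right, result);
        System.out.println(result);  // roughly "10 00:00:00.000000000"
      }
    }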

[05/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java.orig
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java.orig b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java.orig
new file mode 100644
index 0000000..137c39f
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java.orig
@@ -0,0 +1,2034 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io.orc;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.orc.OrcFile.Version;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.HiveTestUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Tests for the top level reader/streamFactory of ORC files.
+ */
+@RunWith(value = Parameterized.class)
+public class TestOrcFile {
+
+  public static class DecimalStruct {
+    HiveDecimalWritable dec;
+
+    DecimalStruct(HiveDecimalWritable hdw) {
+      this.dec = hdw;
+    }
+  }
+
+  public static class SimpleStruct {
+    BytesWritable bytes1;
+    Text string1;
+
+    SimpleStruct(BytesWritable b1, String s1) {
+      this.bytes1 = b1;
+      if(s1 == null) {
+        this.string1 = null;
+      } else {
+        this.string1 = new Text(s1);
+      }
+    }
+  }
+
+  public static class InnerStruct {
+    int int1;
+    Text string1 = new Text();
+    InnerStruct(int int1, String string1) {
+      this.int1 = int1;
+      this.string1.set(string1);
+    }
+  }
+
+  public static class MiddleStruct {
+    List<InnerStruct> list = new ArrayList<InnerStruct>();
+
+    MiddleStruct(InnerStruct... items) {
+      list.clear();
+      list.addAll(Arrays.asList(items));
+    }
+  }
+
+  public static class BigRow {
+    Boolean boolean1;
+    Byte byte1;
+    Short short1;
+    Integer int1;
+    Long long1;
+    Float float1;
+    Double double1;
+    BytesWritable bytes1;
+    Text string1;
+    MiddleStruct middle;
+    List<InnerStruct> list = new ArrayList<InnerStruct>();
+    Map<Text, InnerStruct> map = new HashMap<Text, InnerStruct>();
+
+    BigRow(Boolean b1, Byte b2, Short s1, Integer i1, Long l1, Float f1,
+           Double d1,
+           BytesWritable b3, String s2, MiddleStruct m1,
+           List<InnerStruct> l2, Map<Text, InnerStruct> m2) {
+      this.boolean1 = b1;
+      this.byte1 = b2;
+      this.short1 = s1;
+      this.int1 = i1;
+      this.long1 = l1;
+      this.float1 = f1;
+      this.double1 = d1;
+      this.bytes1 = b3;
+      if (s2 == null) {
+        this.string1 = null;
+      } else {
+        this.string1 = new Text(s2);
+      }
+      this.middle = m1;
+      this.list = l2;
+      this.map = m2;
+    }
+  }
+
+  private static InnerStruct inner(int i, String s) {
+    return new InnerStruct(i, s);
+  }
+
+  private static Map<Text, InnerStruct> map(InnerStruct... items)  {
+    Map<Text, InnerStruct> result = new HashMap<Text, InnerStruct>();
+    for(InnerStruct i: items) {
+      result.put(new Text(i.string1), i);
+    }
+    return result;
+  }
+
+  private static List<InnerStruct> list(InnerStruct... items) {
+    List<InnerStruct> result = new ArrayList<InnerStruct>();
+    result.addAll(Arrays.asList(items));
+    return result;
+  }
+
+  private static BytesWritable bytes(int... items) {
+    BytesWritable result = new BytesWritable();
+    result.setSize(items.length);
+    for(int i=0; i < items.length; ++i) {
+      result.getBytes()[i] = (byte) items[i];
+    }
+    return result;
+  }
+
+  private static ByteBuffer byteBuf(int... items) {
+    ByteBuffer result = ByteBuffer.allocate(items.length);
+    for(int item: items) {
+      result.put((byte) item);
+    }
+    result.flip();
+    return result;
+  }
+
+  Path workDir = new Path(System.getProperty("test.tmp.dir",
+      "target" + File.separator + "test" + File.separator + "tmp"));
+
+  Configuration conf;
+  FileSystem fs;
+  Path testFilePath;
+  private final boolean zeroCopy;
+
+  @Parameters
+  public static Collection<Boolean[]> data() {
+    return Arrays.asList(new Boolean[][] { {false}, {true}});
+  }
+
+  public TestOrcFile(Boolean zcr) {
+    zeroCopy = zcr.booleanValue();
+  }
+
+  @Rule
+  public TestName testCaseName = new TestName();
+
+  @Before
+  public void openFileSystem () throws Exception {
+    conf = new Configuration();
+    if(zeroCopy) {
+      conf.setBoolean(HiveConf.ConfVars.HIVE_ORC_ZEROCOPY.varname, zeroCopy);
+    }
+    fs = FileSystem.getLocal(conf);
+    testFilePath = new Path(workDir, "TestOrcFile." +
+        testCaseName.getMethodName() + ".orc");
+    fs.delete(testFilePath, false);
+  }
+
+  @Test
+  public void testReadFormat_0_11() throws Exception {
+    Path oldFilePath =
+        new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
+    Reader reader = OrcFile.createReader(oldFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    int stripeCount = 0;
+    int rowCount = 0;
+    long currentOffset = -1;
+    for(StripeInformation stripe : reader.getStripes()) {
+      stripeCount += 1;
+      rowCount += stripe.getNumberOfRows();
+      if (currentOffset < 0) {
+        currentOffset = stripe.getOffset() + stripe.getIndexLength()
+            + stripe.getDataLength() + stripe.getFooterLength();
+      } else {
+        assertEquals(currentOffset, stripe.getOffset());
+        currentOffset += stripe.getIndexLength() + stripe.getDataLength()
+            + stripe.getFooterLength();
+      }
+    }
+    assertEquals(reader.getNumberOfRows(), rowCount);
+    assertEquals(2, stripeCount);
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(7500, stats[1].getNumberOfValues());
+    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getFalseCount());
+    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getTrueCount());
+    assertEquals("count: 7500 hasNull: true true: 3750", stats[1].toString());
+
+    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
+    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
+    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
+    assertEquals(11520000, ((IntegerColumnStatistics) stats[3]).getSum());
+    assertEquals("count: 7500 hasNull: true min: 1024 max: 2048 sum: 11520000",
+        stats[3].toString());
+
+    assertEquals(Long.MAX_VALUE,
+        ((IntegerColumnStatistics) stats[5]).getMaximum());
+    assertEquals(Long.MAX_VALUE,
+        ((IntegerColumnStatistics) stats[5]).getMinimum());
+    assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
+    assertEquals(
+        "count: 7500 hasNull: true min: 9223372036854775807 max: 9223372036854775807",
+        stats[5].toString());
+
+    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
+    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
+    assertEquals(-75000.0, ((DoubleColumnStatistics) stats[7]).getSum(),
+        0.00001);
+    assertEquals("count: 7500 hasNull: true min: -15.0 max: -5.0 sum: -75000.0",
+        stats[7].toString());
+
+    assertEquals("count: 7500 hasNull: true min: bye max: hi sum: 0", stats[9].toString());
+
+    // check the inspectors
+    StructObjectInspector readerInspector = (StructObjectInspector) reader
+        .getObjectInspector();
+    assertEquals(ObjectInspector.Category.STRUCT, readerInspector.getCategory());
+    assertEquals("struct<boolean1:boolean,byte1:tinyint,short1:smallint,"
+        + "int1:int,long1:bigint,float1:float,double1:double,bytes1:"
+        + "binary,string1:string,middle:struct<list:array<struct<int1:int,"
+        + "string1:string>>>,list:array<struct<int1:int,string1:string>>,"
+        + "map:map<string,struct<int1:int,string1:string>>,ts:timestamp,"
+        + "decimal1:decimal(38,18)>", readerInspector.getTypeName());
+    List<? extends StructField> fields = readerInspector
+        .getAllStructFieldRefs();
+    BooleanObjectInspector bo = (BooleanObjectInspector) readerInspector
+        .getStructFieldRef("boolean1").getFieldObjectInspector();
+    ByteObjectInspector by = (ByteObjectInspector) readerInspector
+        .getStructFieldRef("byte1").getFieldObjectInspector();
+    ShortObjectInspector sh = (ShortObjectInspector) readerInspector
+        .getStructFieldRef("short1").getFieldObjectInspector();
+    IntObjectInspector in = (IntObjectInspector) readerInspector
+        .getStructFieldRef("int1").getFieldObjectInspector();
+    LongObjectInspector lo = (LongObjectInspector) readerInspector
+        .getStructFieldRef("long1").getFieldObjectInspector();
+    FloatObjectInspector fl = (FloatObjectInspector) readerInspector
+        .getStructFieldRef("float1").getFieldObjectInspector();
+    DoubleObjectInspector dbl = (DoubleObjectInspector) readerInspector
+        .getStructFieldRef("double1").getFieldObjectInspector();
+    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector
+        .getStructFieldRef("bytes1").getFieldObjectInspector();
+    StringObjectInspector st = (StringObjectInspector) readerInspector
+        .getStructFieldRef("string1").getFieldObjectInspector();
+    StructObjectInspector mid = (StructObjectInspector) readerInspector
+        .getStructFieldRef("middle").getFieldObjectInspector();
+    List<? extends StructField> midFields = mid.getAllStructFieldRefs();
+    ListObjectInspector midli = (ListObjectInspector) midFields.get(0)
+        .getFieldObjectInspector();
+    StructObjectInspector inner = (StructObjectInspector) midli
+        .getListElementObjectInspector();
+    List<? extends StructField> inFields = inner.getAllStructFieldRefs();
+    ListObjectInspector li = (ListObjectInspector) readerInspector
+        .getStructFieldRef("list").getFieldObjectInspector();
+    MapObjectInspector ma = (MapObjectInspector) readerInspector
+        .getStructFieldRef("map").getFieldObjectInspector();
+    TimestampObjectInspector tso = (TimestampObjectInspector) readerInspector
+        .getStructFieldRef("ts").getFieldObjectInspector();
+    HiveDecimalObjectInspector dco = (HiveDecimalObjectInspector) readerInspector
+        .getStructFieldRef("decimal1").getFieldObjectInspector();
+    StringObjectInspector mk = (StringObjectInspector) ma
+        .getMapKeyObjectInspector();
+    RecordReader rows = reader.rows();
+    Object row = rows.next(null);
+    assertNotNull(row);
+    // check the contents of the first row
+    assertEquals(false,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(1,
+        by.get(readerInspector.getStructFieldData(row, fields.get(1))));
+    assertEquals(1024,
+        sh.get(readerInspector.getStructFieldData(row, fields.get(2))));
+    assertEquals(65536,
+        in.get(readerInspector.getStructFieldData(row, fields.get(3))));
+    assertEquals(Long.MAX_VALUE,
+        lo.get(readerInspector.getStructFieldData(row, fields.get(4))));
+    assertEquals(1.0,
+        fl.get(readerInspector.getStructFieldData(row, fields.get(5))), 0.00001);
+    assertEquals(-15.0,
+        dbl.get(readerInspector.getStructFieldData(row, fields.get(6))),
+        0.00001);
+    assertEquals(bytes(0, 1, 2, 3, 4),
+        bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,
+            fields.get(7))));
+    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector
+        .getStructFieldData(row, fields.get(8))));
+    List<?> midRow = midli.getList(mid.getStructFieldData(
+        readerInspector.getStructFieldData(row, fields.get(9)),
+        midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1,
+        in.get(inner.getStructFieldData(midRow.get(0), inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(0), inFields.get(1))));
+    assertEquals(2,
+        in.get(inner.getStructFieldData(midRow.get(1), inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(1), inFields.get(1))));
+    List<?> list = li.getList(readerInspector.getStructFieldData(row,
+        fields.get(10)));
+    assertEquals(2, list.size());
+    assertEquals(3,
+        in.get(inner.getStructFieldData(list.get(0), inFields.get(0))));
+    assertEquals("good", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(0), inFields.get(1))));
+    assertEquals(4,
+        in.get(inner.getStructFieldData(list.get(1), inFields.get(0))));
+    assertEquals("bad", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(1), inFields.get(1))));
+    Map<?, ?> map = ma.getMap(readerInspector.getStructFieldData(row,
+        fields.get(11)));
+    assertEquals(0, map.size());
+    assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),
+        tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(12))));
+    assertEquals(HiveDecimal.create("12345678.6547456"),
+        dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(13))));
+
+    // check the contents of the last row (row 7500, after seeking)
+    assertEquals(true, rows.hasNext());
+    rows.seekToRow(7499);
+    row = rows.next(null);
+    assertEquals(true,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(100,
+        by.get(readerInspector.getStructFieldData(row, fields.get(1))));
+    assertEquals(2048,
+        sh.get(readerInspector.getStructFieldData(row, fields.get(2))));
+    assertEquals(65536,
+        in.get(readerInspector.getStructFieldData(row, fields.get(3))));
+    assertEquals(Long.MAX_VALUE,
+        lo.get(readerInspector.getStructFieldData(row, fields.get(4))));
+    assertEquals(2.0,
+        fl.get(readerInspector.getStructFieldData(row, fields.get(5))), 0.00001);
+    assertEquals(-5.0,
+        dbl.get(readerInspector.getStructFieldData(row, fields.get(6))),
+        0.00001);
+    assertEquals(bytes(), bi.getPrimitiveWritableObject(readerInspector
+        .getStructFieldData(row, fields.get(7))));
+    assertEquals("bye", st.getPrimitiveJavaObject(readerInspector
+        .getStructFieldData(row, fields.get(8))));
+    midRow = midli.getList(mid.getStructFieldData(
+        readerInspector.getStructFieldData(row, fields.get(9)),
+        midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1,
+        in.get(inner.getStructFieldData(midRow.get(0), inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(0), inFields.get(1))));
+    assertEquals(2,
+        in.get(inner.getStructFieldData(midRow.get(1), inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(1), inFields.get(1))));
+    list = li.getList(readerInspector.getStructFieldData(row, fields.get(10)));
+    assertEquals(3, list.size());
+    assertEquals(100000000,
+        in.get(inner.getStructFieldData(list.get(0), inFields.get(0))));
+    assertEquals("cat", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(0), inFields.get(1))));
+    assertEquals(-100000,
+        in.get(inner.getStructFieldData(list.get(1), inFields.get(0))));
+    assertEquals("in", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(1), inFields.get(1))));
+    assertEquals(1234,
+        in.get(inner.getStructFieldData(list.get(2), inFields.get(0))));
+    assertEquals("hat", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(2), inFields.get(1))));
+    map = ma.getMap(readerInspector.getStructFieldData(row, fields.get(11)));
+    assertEquals(2, map.size());
+    boolean[] found = new boolean[2];
+    for(Object key : map.keySet()) {
+      String str = mk.getPrimitiveJavaObject(key);
+      if (str.equals("chani")) {
+        assertEquals(false, found[0]);
+        assertEquals(5,
+            in.get(inner.getStructFieldData(map.get(key), inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(inner.getStructFieldData(
+            map.get(key), inFields.get(1))));
+        found[0] = true;
+      } else if (str.equals("mauddib")) {
+        assertEquals(false, found[1]);
+        assertEquals(1,
+            in.get(inner.getStructFieldData(map.get(key), inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(inner.getStructFieldData(
+            map.get(key), inFields.get(1))));
+        found[1] = true;
+      } else {
+        throw new IllegalArgumentException("Unknown key " + str);
+      }
+    }
+    assertEquals(true, found[0]);
+    assertEquals(true, found[1]);
+    assertEquals(Timestamp.valueOf("2000-03-12 15:00:01"),
+        tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(12))));
+    assertEquals(HiveDecimal.create("12345678.6547457"),
+        dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(13))));
+
+    // verify end of file and close the reader
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  @Test
+  public void testTimestamp() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector(Timestamp.class,
+          ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+
+    Writer writer = OrcFile.createWriter(testFilePath,
+        OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000)
+            .version(OrcFile.Version.V_0_11));
+    List<Timestamp> tslist = Lists.newArrayList();
+    tslist.add(Timestamp.valueOf("2037-01-01 00:00:00.000999"));
+    tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.000000222"));
+    tslist.add(Timestamp.valueOf("1999-01-01 00:00:00.999999999"));
+    tslist.add(Timestamp.valueOf("1995-01-01 00:00:00.688888888"));
+    tslist.add(Timestamp.valueOf("2002-01-01 00:00:00.1"));
+    tslist.add(Timestamp.valueOf("2010-03-02 00:00:00.000009001"));
+    tslist.add(Timestamp.valueOf("2005-01-01 00:00:00.000002229"));
+    tslist.add(Timestamp.valueOf("2006-01-01 00:00:00.900203003"));
+    tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.800000007"));
+    tslist.add(Timestamp.valueOf("1996-08-02 00:00:00.723100809"));
+    tslist.add(Timestamp.valueOf("1998-11-02 00:00:00.857340643"));
+    tslist.add(Timestamp.valueOf("2008-10-02 00:00:00"));
+
+    for (Timestamp ts : tslist) {
+      writer.addRow(ts);
+    }
+
+    writer.close();
+
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    RecordReader rows = reader.rows(null);
+    int idx = 0;
+    while (rows.hasNext()) {
+      Object row = rows.next(null);
+      assertEquals(tslist.get(idx++).getNanos(), ((TimestampWritable) row).getNanos());
+    }
+    assertEquals(1, OrcUtils.getFlattenedColumnsCount(inspector));
+    boolean[] expected = new boolean[] {false};
+    boolean[] included = OrcUtils.includeColumns("", "ts", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+  }
+
+  @Test
+  public void testHiveDecimalAllNulls() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (DecimalStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+
+    Writer writer = OrcFile.createWriter(testFilePath,
+        OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000));
+    // this is an invalid decimal value; getting a HiveDecimal from it will return null
+    writer.addRow(new DecimalStruct(new HiveDecimalWritable("1.463040009E9".getBytes(), 8)));
+    writer.addRow(new DecimalStruct(null));
+    writer.close();
+
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    StructObjectInspector readerInspector =
+        (StructObjectInspector) reader.getObjectInspector();
+    List<? extends StructField> fields = readerInspector.getAllStructFieldRefs();
+    HiveDecimalObjectInspector doi = (HiveDecimalObjectInspector) readerInspector.
+        getStructFieldRef("dec").getFieldObjectInspector();
+    RecordReader rows = reader.rows(null);
+    while (rows.hasNext()) {
+      Object row = rows.next(null);
+      assertEquals(null, doi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,
+          fields.get(0))));
+    }
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(2, stats[0].getNumberOfValues());
+    assertEquals(0, stats[1].getNumberOfValues());
+    assertEquals(true, stats[1].hasNull());
+  }
+
+  @Test
+  public void testStringAndBinaryStatistics() throws Exception {
+
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3,4), "foo"));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3), "bar"));
+    writer.addRow(new SimpleStruct(bytes(0,1,2,3,4,5), null));
+    writer.addRow(new SimpleStruct(null, "hi"));
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    assertEquals(3, OrcUtils.getFlattenedColumnsCount(inspector));
+    boolean[] expected = new boolean[] {false, false, true};
+    boolean[] included = OrcUtils.includeColumns("string1", "bytes1,string1", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, false, false};
+    included = OrcUtils.includeColumns("", "bytes1,string1", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, false, false};
+    included = OrcUtils.includeColumns(null, "bytes1,string1", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(4, stats[0].getNumberOfValues());
+    assertEquals("count: 4 hasNull: false", stats[0].toString());
+
+    assertEquals(3, stats[1].getNumberOfValues());
+    assertEquals(15, ((BinaryColumnStatistics) stats[1]).getSum());
+    assertEquals("count: 3 hasNull: true sum: 15", stats[1].toString());
+
+    assertEquals(3, stats[2].getNumberOfValues());
+    assertEquals("bar", ((StringColumnStatistics) stats[2]).getMinimum());
+    assertEquals("hi", ((StringColumnStatistics) stats[2]).getMaximum());
+    assertEquals(8, ((StringColumnStatistics) stats[2]).getSum());
+    assertEquals("count: 3 hasNull: true min: bar max: hi sum: 8",
+        stats[2].toString());
+
+    // check the inspectors
+    StructObjectInspector readerInspector =
+        (StructObjectInspector) reader.getObjectInspector();
+    assertEquals(ObjectInspector.Category.STRUCT,
+        readerInspector.getCategory());
+    assertEquals("struct<bytes1:binary,string1:string>",
+        readerInspector.getTypeName());
+    List<? extends StructField> fields =
+        readerInspector.getAllStructFieldRefs();
+    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector.
+        getStructFieldRef("bytes1").getFieldObjectInspector();
+    StringObjectInspector st = (StringObjectInspector) readerInspector.
+        getStructFieldRef("string1").getFieldObjectInspector();
+    RecordReader rows = reader.rows();
+    Object row = rows.next(null);
+    assertNotNull(row);
+    // check the contents of the first row
+    assertEquals(bytes(0,1,2,3,4), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("foo", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of second row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertEquals(bytes(0,1,2,3), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("bar", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of the third row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertEquals(bytes(0,1,2,3,4,5), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertNull(st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // check the contents of the fourth row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertNull(bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(1))));
+
+    // verify end of file and close the reader
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+
+  @Test
+  public void testStripeLevelStats() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+
+    Writer writer = OrcFile.createWriter(testFilePath,
+        OrcFile.writerOptions(conf)
+            .inspector(inspector)
+            .stripeSize(100000)
+            .bufferSize(10000));
+    for (int i = 0; i < 11000; i++) {
+      if (i >= 5000) {
+        if (i >= 10000) {
+          writer.addRow(new InnerStruct(3, "three"));
+        } else {
+          writer.addRow(new InnerStruct(2, "two"));
+        }
+      } else {
+        writer.addRow(new InnerStruct(1, "one"));
+      }
+    }
+
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    assertEquals(3, OrcUtils.getFlattenedColumnsCount(inspector));
+    boolean[] expected = new boolean[] {false, true, false};
+    boolean[] included = OrcUtils.includeColumns("int1", "int1,string1", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    Metadata metadata = reader.getMetadata();
+    int numStripes = metadata.getStripeStatistics().size();
+    assertEquals(3, numStripes);
+    StripeStatistics ss1 = metadata.getStripeStatistics().get(0);
+    StripeStatistics ss2 = metadata.getStripeStatistics().get(1);
+    StripeStatistics ss3 = metadata.getStripeStatistics().get(2);
+
+    assertEquals(5000, ss1.getColumnStatistics()[0].getNumberOfValues());
+    assertEquals(5000, ss2.getColumnStatistics()[0].getNumberOfValues());
+    assertEquals(1000, ss3.getColumnStatistics()[0].getNumberOfValues());
+
+    assertEquals(5000, (ss1.getColumnStatistics()[1]).getNumberOfValues());
+    assertEquals(5000, (ss2.getColumnStatistics()[1]).getNumberOfValues());
+    assertEquals(1000, (ss3.getColumnStatistics()[1]).getNumberOfValues());
+    assertEquals(1, ((IntegerColumnStatistics)ss1.getColumnStatistics()[1]).getMinimum());
+    assertEquals(2, ((IntegerColumnStatistics)ss2.getColumnStatistics()[1]).getMinimum());
+    assertEquals(3, ((IntegerColumnStatistics)ss3.getColumnStatistics()[1]).getMinimum());
+    assertEquals(1, ((IntegerColumnStatistics)ss1.getColumnStatistics()[1]).getMaximum());
+    assertEquals(2, ((IntegerColumnStatistics)ss2.getColumnStatistics()[1]).getMaximum());
+    assertEquals(3, ((IntegerColumnStatistics)ss3.getColumnStatistics()[1]).getMaximum());
+    assertEquals(5000, ((IntegerColumnStatistics)ss1.getColumnStatistics()[1]).getSum());
+    assertEquals(10000, ((IntegerColumnStatistics)ss2.getColumnStatistics()[1]).getSum());
+    assertEquals(3000, ((IntegerColumnStatistics)ss3.getColumnStatistics()[1]).getSum());
+
+    assertEquals(5000, (ss1.getColumnStatistics()[2]).getNumberOfValues());
+    assertEquals(5000, (ss2.getColumnStatistics()[2]).getNumberOfValues());
+    assertEquals(1000, (ss3.getColumnStatistics()[2]).getNumberOfValues());
+    assertEquals("one", ((StringColumnStatistics)ss1.getColumnStatistics()[2]).getMinimum());
+    assertEquals("two", ((StringColumnStatistics)ss2.getColumnStatistics()[2]).getMinimum());
+    assertEquals("three", ((StringColumnStatistics)ss3.getColumnStatistics()[2]).getMinimum());
+    assertEquals("one", ((StringColumnStatistics)ss1.getColumnStatistics()[2]).getMaximum());
+    assertEquals("two", ((StringColumnStatistics) ss2.getColumnStatistics()[2]).getMaximum());
+    assertEquals("three", ((StringColumnStatistics)ss3.getColumnStatistics()[2]).getMaximum());
+    assertEquals(15000, ((StringColumnStatistics)ss1.getColumnStatistics()[2]).getSum());
+    assertEquals(15000, ((StringColumnStatistics)ss2.getColumnStatistics()[2]).getSum());
+    assertEquals(5000, ((StringColumnStatistics)ss3.getColumnStatistics()[2]).getSum());
+
+    RecordReaderImpl recordReader = (RecordReaderImpl) reader.rows();
+    OrcProto.RowIndex[] index = recordReader.readRowIndex(0, null, null).getRowGroupIndex();
+    assertEquals(3, index.length);
+    List<OrcProto.RowIndexEntry> items = index[1].getEntryList();
+    assertEquals(1, items.size());
+    assertEquals(3, items.get(0).getPositionsCount());
+    assertEquals(0, items.get(0).getPositions(0));
+    assertEquals(0, items.get(0).getPositions(1));
+    assertEquals(0, items.get(0).getPositions(2));
+    assertEquals(1,
+                 items.get(0).getStatistics().getIntStatistics().getMinimum());
+    index = recordReader.readRowIndex(1, null, null).getRowGroupIndex();
+    assertEquals(3, index.length);
+    items = index[1].getEntryList();
+    assertEquals(2,
+        items.get(0).getStatistics().getIntStatistics().getMaximum());
+  }
+
+  @Test
+  public void test1() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+        OrcFile.writerOptions(conf)
+            .inspector(inspector)
+            .stripeSize(100000)
+            .bufferSize(10000));
+    writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536,
+        Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0, 1, 2, 3, 4), "hi",
+        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
+        list(inner(3, "good"), inner(4, "bad")),
+        map()));
+    writer.addRow(new BigRow(true, (byte) 100, (short) 2048, 65536,
+        Long.MAX_VALUE, (float) 2.0, -5.0, bytes(), "bye",
+        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
+        list(inner(100000000, "cat"), inner(-100000, "in"), inner(1234, "hat")),
+        map(inner(5, "chani"), inner(1, "mauddib"))));
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    assertEquals(24, OrcUtils.getFlattenedColumnsCount(inspector));
+    boolean[] expected = new boolean[] {false, false, false, false, false,
+        false, false, false, false, false,
+        false, false, false, false, false,
+        false, false, false, false, false,
+        false, false, false, false};
+    boolean[] included = OrcUtils.includeColumns("",
+        "boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, true, false, false, false,
+        false, false, false, false, true,
+        true, true, true, true, true,
+        false, false, false, false, true,
+        true, true, true, true};
+    included = OrcUtils.includeColumns("boolean1,string1,middle,map",
+        "boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, true, true, true, true,
+        true, true, true, true, true,
+        true, true, true, true, true,
+        true, true, true, true, true,
+        true, true, true, true};
+    included = OrcUtils.includeColumns(
+        "boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map",
+        "boolean1,byte1,short1,int1,long1,float1,double1,bytes1,string1,middle,list,map", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    Metadata metadata = reader.getMetadata();
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(2, stats[1].getNumberOfValues());
+    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount());
+    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount());
+    assertEquals("count: 2 hasNull: false true: 1", stats[1].toString());
+
+    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
+    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
+    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
+    assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum());
+    assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072",
+        stats[3].toString());
+
+    StripeStatistics ss = metadata.getStripeStatistics().get(0);
+    assertEquals(2, ss.getColumnStatistics()[0].getNumberOfValues());
+    assertEquals(1, ((BooleanColumnStatistics) ss.getColumnStatistics()[1]).getTrueCount());
+    assertEquals(1024, ((IntegerColumnStatistics) ss.getColumnStatistics()[3]).getMinimum());
+    assertEquals(2048, ((IntegerColumnStatistics) ss.getColumnStatistics()[3]).getMaximum());
+    assertEquals(3072, ((IntegerColumnStatistics) ss.getColumnStatistics()[3]).getSum());
+    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
+    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
+    assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001);
+    assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0",
+        stats[7].toString());
+
+    assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5", stats[9].toString());
+
+    // check the inspectors
+    StructObjectInspector readerInspector =
+        (StructObjectInspector) reader.getObjectInspector();
+    assertEquals(ObjectInspector.Category.STRUCT,
+        readerInspector.getCategory());
+    assertEquals("struct<boolean1:boolean,byte1:tinyint,short1:smallint,"
+        + "int1:int,long1:bigint,float1:float,double1:double,bytes1:"
+        + "binary,string1:string,middle:struct<list:array<struct<int1:int,"
+        + "string1:string>>>,list:array<struct<int1:int,string1:string>>,"
+        + "map:map<string,struct<int1:int,string1:string>>>",
+        readerInspector.getTypeName());
+    List<? extends StructField> fields =
+        readerInspector.getAllStructFieldRefs();
+    BooleanObjectInspector bo = (BooleanObjectInspector) readerInspector.
+        getStructFieldRef("boolean1").getFieldObjectInspector();
+    ByteObjectInspector by = (ByteObjectInspector) readerInspector.
+        getStructFieldRef("byte1").getFieldObjectInspector();
+    ShortObjectInspector sh = (ShortObjectInspector) readerInspector.
+        getStructFieldRef("short1").getFieldObjectInspector();
+    IntObjectInspector in = (IntObjectInspector) readerInspector.
+        getStructFieldRef("int1").getFieldObjectInspector();
+    LongObjectInspector lo = (LongObjectInspector) readerInspector.
+        getStructFieldRef("long1").getFieldObjectInspector();
+    FloatObjectInspector fl = (FloatObjectInspector) readerInspector.
+        getStructFieldRef("float1").getFieldObjectInspector();
+    DoubleObjectInspector dbl = (DoubleObjectInspector) readerInspector.
+        getStructFieldRef("double1").getFieldObjectInspector();
+    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector.
+        getStructFieldRef("bytes1").getFieldObjectInspector();
+    StringObjectInspector st = (StringObjectInspector) readerInspector.
+        getStructFieldRef("string1").getFieldObjectInspector();
+    StructObjectInspector mid = (StructObjectInspector) readerInspector.
+        getStructFieldRef("middle").getFieldObjectInspector();
+    List<? extends StructField> midFields =
+        mid.getAllStructFieldRefs();
+    ListObjectInspector midli =
+        (ListObjectInspector) midFields.get(0).getFieldObjectInspector();
+    StructObjectInspector inner = (StructObjectInspector)
+        midli.getListElementObjectInspector();
+    List<? extends StructField> inFields = inner.getAllStructFieldRefs();
+    ListObjectInspector li = (ListObjectInspector) readerInspector.
+        getStructFieldRef("list").getFieldObjectInspector();
+    MapObjectInspector ma = (MapObjectInspector) readerInspector.
+        getStructFieldRef("map").getFieldObjectInspector();
+    StringObjectInspector mk = (StringObjectInspector)
+        ma.getMapKeyObjectInspector();
+    RecordReader rows = reader.rows();
+    Object row = rows.next(null);
+    assertNotNull(row);
+    // check the contents of the first row
+    assertEquals(false,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(1, by.get(readerInspector.getStructFieldData(row,
+        fields.get(1))));
+    assertEquals(1024, sh.get(readerInspector.getStructFieldData(row,
+        fields.get(2))));
+    assertEquals(65536, in.get(readerInspector.getStructFieldData(row,
+        fields.get(3))));
+    assertEquals(Long.MAX_VALUE, lo.get(readerInspector.
+        getStructFieldData(row, fields.get(4))));
+    assertEquals(1.0, fl.get(readerInspector.getStructFieldData(row,
+        fields.get(5))), 0.00001);
+    assertEquals(-15.0, dbl.get(readerInspector.getStructFieldData(row,
+        fields.get(6))), 0.00001);
+    assertEquals(bytes(0,1,2,3,4), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(7))));
+    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(8))));
+    List<?> midRow = midli.getList(mid.getStructFieldData(readerInspector.
+        getStructFieldData(row, fields.get(9)), midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1, in.get(inner.getStructFieldData(midRow.get(0),
+        inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (midRow.get(0), inFields.get(1))));
+    assertEquals(2, in.get(inner.getStructFieldData(midRow.get(1),
+        inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (midRow.get(1), inFields.get(1))));
+    List<?> list = li.getList(readerInspector.getStructFieldData(row,
+        fields.get(10)));
+    assertEquals(2, list.size());
+    assertEquals(3, in.get(inner.getStructFieldData(list.get(0),
+        inFields.get(0))));
+    assertEquals("good", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (list.get(0), inFields.get(1))));
+    assertEquals(4, in.get(inner.getStructFieldData(list.get(1),
+        inFields.get(0))));
+    assertEquals("bad", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (list.get(1), inFields.get(1))));
+    Map<?,?> map = ma.getMap(readerInspector.getStructFieldData(row,
+        fields.get(11)));
+    assertEquals(0, map.size());
+
+    // check the contents of second row
+    assertEquals(true, rows.hasNext());
+    row = rows.next(row);
+    assertEquals(true,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(100, by.get(readerInspector.getStructFieldData(row,
+        fields.get(1))));
+    assertEquals(2048, sh.get(readerInspector.getStructFieldData(row,
+        fields.get(2))));
+    assertEquals(65536, in.get(readerInspector.getStructFieldData(row,
+        fields.get(3))));
+    assertEquals(Long.MAX_VALUE, lo.get(readerInspector.
+        getStructFieldData(row, fields.get(4))));
+    assertEquals(2.0, fl.get(readerInspector.getStructFieldData(row,
+        fields.get(5))), 0.00001);
+    assertEquals(-5.0, dbl.get(readerInspector.getStructFieldData(row,
+        fields.get(6))), 0.00001);
+    assertEquals(bytes(), bi.getPrimitiveWritableObject(
+        readerInspector.getStructFieldData(row, fields.get(7))));
+    assertEquals("bye", st.getPrimitiveJavaObject(readerInspector.
+        getStructFieldData(row, fields.get(8))));
+    midRow = midli.getList(mid.getStructFieldData(readerInspector.
+        getStructFieldData(row, fields.get(9)), midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1, in.get(inner.getStructFieldData(midRow.get(0),
+        inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (midRow.get(0), inFields.get(1))));
+    assertEquals(2, in.get(inner.getStructFieldData(midRow.get(1),
+        inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (midRow.get(1), inFields.get(1))));
+    list = li.getList(readerInspector.getStructFieldData(row,
+        fields.get(10)));
+    assertEquals(3, list.size());
+    assertEquals(100000000, in.get(inner.getStructFieldData(list.get(0),
+        inFields.get(0))));
+    assertEquals("cat", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (list.get(0), inFields.get(1))));
+    assertEquals(-100000, in.get(inner.getStructFieldData(list.get(1),
+        inFields.get(0))));
+    assertEquals("in", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (list.get(1), inFields.get(1))));
+    assertEquals(1234, in.get(inner.getStructFieldData(list.get(2),
+        inFields.get(0))));
+    assertEquals("hat", st.getPrimitiveJavaObject(inner.getStructFieldData
+        (list.get(2), inFields.get(1))));
+    map = ma.getMap(readerInspector.getStructFieldData(row,
+        fields.get(11)));
+    assertEquals(2, map.size());
+    boolean[] found = new boolean[2];
+    for(Object key: map.keySet()) {
+      String str = mk.getPrimitiveJavaObject(key);
+      if (str.equals("chani")) {
+        assertEquals(false, found[0]);
+        assertEquals(5, in.get(inner.getStructFieldData(map.get(key),
+            inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(
+            inner.getStructFieldData(map.get(key), inFields.get(1))));
+        found[0] = true;
+      } else if (str.equals("mauddib")) {
+        assertEquals(false, found[1]);
+        assertEquals(1, in.get(inner.getStructFieldData(map.get(key),
+            inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(
+            inner.getStructFieldData(map.get(key), inFields.get(1))));
+        found[1] = true;
+      } else {
+        throw new IllegalArgumentException("Unknown key " + str);
+      }
+    }
+    assertEquals(true, found[0]);
+    assertEquals(true, found[1]);
+
+    // handle the close up
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  @Test
+  public void columnProjection() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .rowIndexStride(1000));
+    Random r1 = new Random(1);
+    Random r2 = new Random(2);
+    int x;
+    int minInt=0, maxInt=0;
+    String y;
+    String minStr = null, maxStr = null;
+    for(int i=0; i < 21000; ++i) {
+      x = r1.nextInt();
+      y = Long.toHexString(r2.nextLong());
+      if (i == 0 || x < minInt) {
+        minInt = x;
+      }
+      if (i == 0 || x > maxInt) {
+        maxInt = x;
+      }
+      if (i == 0 || y.compareTo(minStr) < 0) {
+        minStr = y;
+      }
+      if (i == 0 || y.compareTo(maxStr) > 0) {
+        maxStr = y;
+      }
+      writer.addRow(inner(x, y));
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    // check out the statistics
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(3, stats.length);
+    for(ColumnStatistics s: stats) {
+      assertEquals(21000, s.getNumberOfValues());
+      if (s instanceof IntegerColumnStatistics) {
+        assertEquals(minInt, ((IntegerColumnStatistics) s).getMinimum());
+        assertEquals(maxInt, ((IntegerColumnStatistics) s).getMaximum());
+      } else if (s instanceof  StringColumnStatistics) {
+        assertEquals(maxStr, ((StringColumnStatistics) s).getMaximum());
+        assertEquals(minStr, ((StringColumnStatistics) s).getMinimum());
+      }
+    }
+
+    // check out the types
+    List<OrcProto.Type> types = reader.getTypes();
+    assertEquals(3, types.size());
+    assertEquals(OrcProto.Type.Kind.STRUCT, types.get(0).getKind());
+    assertEquals(2, types.get(0).getSubtypesCount());
+    assertEquals(1, types.get(0).getSubtypes(0));
+    assertEquals(2, types.get(0).getSubtypes(1));
+    assertEquals(OrcProto.Type.Kind.INT, types.get(1).getKind());
+    assertEquals(0, types.get(1).getSubtypesCount());
+    assertEquals(OrcProto.Type.Kind.STRING, types.get(2).getKind());
+    assertEquals(0, types.get(2).getSubtypesCount());
+
+    // read the contents and make sure they match
+    RecordReader rows1 = reader.rows(new boolean[]{true, true, false});
+    RecordReader rows2 = reader.rows(new boolean[]{true, false, true});
+    r1 = new Random(1);
+    r2 = new Random(2);
+    OrcStruct row1 = null;
+    OrcStruct row2 = null;
+    for(int i = 0; i < 21000; ++i) {
+      assertEquals(true, rows1.hasNext());
+      assertEquals(true, rows2.hasNext());
+      row1 = (OrcStruct) rows1.next(row1);
+      row2 = (OrcStruct) rows2.next(row2);
+      assertEquals(r1.nextInt(), ((IntWritable) row1.getFieldValue(0)).get());
+      assertEquals(Long.toHexString(r2.nextLong()),
+          row2.getFieldValue(1).toString());
+    }
+    assertEquals(false, rows1.hasNext());
+    assertEquals(false, rows2.hasNext());
+    rows1.close();
+    rows2.close();
+  }
+
+  @Test
+  public void emptyFile() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(false, reader.rows().hasNext());
+    assertEquals(CompressionKind.NONE, reader.getCompression());
+    assertEquals(0, reader.getNumberOfRows());
+    assertEquals(0, reader.getCompressionSize());
+    assertEquals(false, reader.getMetadataKeys().iterator().hasNext());
+    assertEquals(3, reader.getContentLength());
+    assertEquals(false, reader.getStripes().iterator().hasNext());
+  }
+
+  @Test
+  public void metaData() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
+    writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127,
+                                              -128));
+    writer.addUserMetadata("clobber", byteBuf(1, 2, 3));
+    writer.addUserMetadata("clobber", byteBuf(4, 3, 2, 1));
+    ByteBuffer bigBuf = ByteBuffer.allocate(40000);
+    Random random = new Random(0);
+    random.nextBytes(bigBuf.array());
+    writer.addUserMetadata("big", bigBuf);
+    bigBuf.position(0);
+    writer.addRow(new BigRow(true, (byte) 127, (short) 1024, 42,
+        42L * 1024 * 1024 * 1024, (float) 3.1415, -2.713, null,
+        null, null, null, null));
+    writer.addUserMetadata("clobber", byteBuf(5,7,11,13,17,19));
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(byteBuf(5,7,11,13,17,19), reader.getMetadataValue("clobber"));
+    assertEquals(byteBuf(1,2,3,4,5,6,7,-1,-2,127,-128),
+        reader.getMetadataValue("my.meta"));
+    assertEquals(bigBuf, reader.getMetadataValue("big"));
+    try {
+      reader.getMetadataValue("unknown");
+      assertTrue(false);
+    } catch (IllegalArgumentException iae) {
+      // PASS
+    }
+    int i = 0;
+    for(String key: reader.getMetadataKeys()) {
+      if ("my.meta".equals(key) ||
+          "clobber".equals(key) ||
+          "big".equals(key)) {
+        i += 1;
+      } else {
+        throw new IllegalArgumentException("unknown key " + key);
+      }
+    }
+    assertEquals(3, i);
+    Metadata metadata = reader.getMetadata();
+    int numStripes = metadata.getStripeStatistics().size();
+    assertEquals(1, numStripes);
+  }
+
+  /**
+   * Generate an ORC file with a range of dates and times.
+   */
+  public void createOrcDateFile(Path file, int minYear, int maxYear
+                                ) throws IOException {
+    List<OrcProto.Type> types = new ArrayList<OrcProto.Type>();
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRUCT).
+        addFieldNames("time").addFieldNames("date").
+        addSubtypes(1).addSubtypes(2).build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.TIMESTAMP).
+        build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.DATE).
+        build());
+
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = OrcStruct.createObjectInspector(0, types);
+    }
+    Writer writer = OrcFile.createWriter(file,
+        OrcFile.writerOptions(conf)
+            .inspector(inspector)
+            .stripeSize(100000)
+            .bufferSize(10000)
+            .blockPadding(false));
+    OrcStruct row = new OrcStruct(2);
+    for (int year = minYear; year < maxYear; ++year) {
+      for (int ms = 1000; ms < 2000; ++ms) {
+        row.setFieldValue(0,
+            new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56."
+                + ms)));
+        row.setFieldValue(1,
+            new DateWritable(new Date(year - 1900, 11, 25)));
+        writer.addRow(row);
+      }
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(file,
+        OrcFile.readerOptions(conf));
+    RecordReader rows = reader.rows();
+    for (int year = minYear; year < maxYear; ++year) {
+      for(int ms = 1000; ms < 2000; ++ms) {
+        row = (OrcStruct) rows.next(row);
+        assertEquals(new TimestampWritable
+                (Timestamp.valueOf(year + "-05-05 12:34:56." + ms)),
+            row.getFieldValue(0));
+        assertEquals(new DateWritable(new Date(year - 1900, 11, 25)),
+            row.getFieldValue(1));
+      }
+    }
+  }
+
+  @Test
+  public void testDate1900() throws Exception {
+    createOrcDateFile(testFilePath, 1900, 1970);
+  }
+
+  @Test
+  public void testDate2038() throws Exception {
+    createOrcDateFile(testFilePath, 2038, 2250);
+  }
+
+  /**
+   * We test union, timestamp, and decimal separately since we need to make the
+   * object inspector manually. (The Hive reflection-based object inspector
+   * doesn't handle them properly.)
+   */
+  @Test
+  public void testUnionAndTimestamp() throws Exception {
+    List<OrcProto.Type> types = new ArrayList<OrcProto.Type>();
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRUCT).
+        addFieldNames("time").addFieldNames("union").addFieldNames("decimal").
+        addSubtypes(1).addSubtypes(2).addSubtypes(5).build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.TIMESTAMP).
+        build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.UNION).
+        addSubtypes(3).addSubtypes(4).build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.INT).
+        build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRING).
+        build());
+    types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.DECIMAL).
+        build());
+
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = OrcStruct.createObjectInspector(0, types);
+    }
+    HiveDecimal maxValue = HiveDecimal.create("10000000000000000000");
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .blockPadding(false));
+    OrcStruct row = new OrcStruct(3);
+    OrcUnion union = new OrcUnion();
+    row.setFieldValue(1, union);
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
+    HiveDecimal value = HiveDecimal.create("12345678.6547456");
+    row.setFieldValue(2, new HiveDecimalWritable(value));
+    union.set((byte) 0, new IntWritable(42));
+    writer.addRow(row);
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
+    union.set((byte) 1, new Text("hello"));
+    value = HiveDecimal.create("-5643.234");
+    row.setFieldValue(2, new HiveDecimalWritable(value));
+    writer.addRow(row);
+    row.setFieldValue(0, null);
+    row.setFieldValue(1, null);
+    row.setFieldValue(2, null);
+    writer.addRow(row);
+    row.setFieldValue(1, union);
+    union.set((byte) 0, null);
+    writer.addRow(row);
+    union.set((byte) 1, null);
+    writer.addRow(row);
+    union.set((byte) 0, new IntWritable(200000));
+    row.setFieldValue(0, new TimestampWritable
+        (Timestamp.valueOf("1970-01-01 00:00:00")));
+    value = HiveDecimal.create("10000000000000000000");
+    row.setFieldValue(2, new HiveDecimalWritable(value));
+    writer.addRow(row);
+    Random rand = new Random(42);
+    for(int i=1970; i < 2038; ++i) {
+      row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i +
+          "-05-05 12:34:56." + i)));
+      if ((i & 1) == 0) {
+        union.set((byte) 0, new IntWritable(i*i));
+      } else {
+        union.set((byte) 1, new Text(Integer.toString(i * i)));
+      }
+      value = HiveDecimal.create(new BigInteger(64, rand),
+          rand.nextInt(18));
+      row.setFieldValue(2, new HiveDecimalWritable(value));
+      if (maxValue.compareTo(value) < 0) {
+        maxValue = value;
+      }
+      writer.addRow(row);
+    }
+    // let's add a lot of constant rows to test the RLE
+    row.setFieldValue(0, null);
+    union.set((byte) 0, new IntWritable(1732050807));
+    row.setFieldValue(2, null);
+    for(int i=0; i < 5000; ++i) {
+      writer.addRow(row);
+    }
+    union.set((byte) 0, new IntWritable(0));
+    writer.addRow(row);
+    union.set((byte) 0, new IntWritable(10));
+    writer.addRow(row);
+    union.set((byte) 0, new IntWritable(138));
+    writer.addRow(row);
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+
+    assertEquals(6, OrcUtils.getFlattenedColumnsCount(inspector));
+    boolean[] expected = new boolean[] {false, false, false, false, false, false};
+    boolean[] included = OrcUtils.includeColumns("", "time,union,decimal", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, true, false, false, false, true};
+    included = OrcUtils.includeColumns("time,decimal", "time,union,decimal", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    expected = new boolean[] {false, false, true, true, true, false};
+    included = OrcUtils.includeColumns("union", "time,union,decimal", inspector);
+    assertEquals(true, Arrays.equals(expected, included));
+
+    assertEquals(false, reader.getMetadataKeys().iterator().hasNext());
+    assertEquals(5077, reader.getNumberOfRows());
+    DecimalColumnStatistics stats =
+        (DecimalColumnStatistics) reader.getStatistics()[5];
+    assertEquals(71, stats.getNumberOfValues());
+    assertEquals(HiveDecimal.create("-5643.234"), stats.getMinimum());
+    assertEquals(maxValue, stats.getMaximum());
+    // TODO: fix this
+//    assertEquals(null,stats.getSum());
+    int stripeCount = 0;
+    int rowCount = 0;
+    long currentOffset = -1;
+    for(StripeInformation stripe: reader.getStripes()) {
+      stripeCount += 1;
+      rowCount += stripe.getNumberOfRows();
+      if (currentOffset < 0) {
+        currentOffset = stripe.getOffset() + stripe.getLength();
+      } else {
+        assertEquals(currentOffset, stripe.getOffset());
+        currentOffset += stripe.getLength();
+      }
+    }
+    assertEquals(reader.getNumberOfRows(), rowCount);
+    assertEquals(2, stripeCount);
+    assertEquals(reader.getContentLength(), currentOffset);
+    RecordReader rows = reader.rows();
+    assertEquals(0, rows.getRowNumber());
+    assertEquals(0.0, rows.getProgress(), 0.000001);
+    assertEquals(true, rows.hasNext());
+    row = (OrcStruct) rows.next(null);
+    assertEquals(1, rows.getRowNumber());
+    inspector = reader.getObjectInspector();
+    assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
+        inspector.getTypeName());
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
+        row.getFieldValue(0));
+    union = (OrcUnion) row.getFieldValue(1);
+    assertEquals(0, union.getTag());
+    assertEquals(new IntWritable(42), union.getObject());
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("12345678.6547456")),
+        row.getFieldValue(2));
+    row = (OrcStruct) rows.next(row);
+    assertEquals(2, rows.getRowNumber());
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+        row.getFieldValue(0));
+    assertEquals(1, union.getTag());
+    assertEquals(new Text("hello"), union.getObject());
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")),
+        row.getFieldValue(2));
+    row = (OrcStruct) rows.next(row);
+    assertEquals(null, row.getFieldValue(0));
+    assertEquals(null, row.getFieldValue(1));
+    assertEquals(null, row.getFieldValue(2));
+    row = (OrcStruct) rows.next(row);
+    assertEquals(null, row.getFieldValue(0));
+    union = (OrcUnion) row.getFieldValue(1);
+    assertEquals(0, union.getTag());
+    assertEquals(null, union.getObject());
+    assertEquals(null, row.getFieldValue(2));
+    row = (OrcStruct) rows.next(row);
+    assertEquals(null, row.getFieldValue(0));
+    assertEquals(1, union.getTag());
+    assertEquals(null, union.getObject());
+    assertEquals(null, row.getFieldValue(2));
+    row = (OrcStruct) rows.next(row);
+    assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
+        row.getFieldValue(0));
+    assertEquals(new IntWritable(200000), union.getObject());
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
+                 row.getFieldValue(2));
+    rand = new Random(42);
+    for(int i=1970; i < 2038; ++i) {
+      row = (OrcStruct) rows.next(row);
+      assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
+          row.getFieldValue(0));
+      if ((i & 1) == 0) {
+        assertEquals(0, union.getTag());
+        assertEquals(new IntWritable(i*i), union.getObject());
+      } else {
+        assertEquals(1, union.getTag());
+        assertEquals(new Text(Integer.toString(i * i)), union.getObject());
+      }
+      assertEquals(new HiveDecimalWritable(HiveDecimal.create(new BigInteger(64, rand),
+                                   rand.nextInt(18))), row.getFieldValue(2));
+    }
+    for(int i=0; i < 5000; ++i) {
+      row = (OrcStruct) rows.next(row);
+      assertEquals(new IntWritable(1732050807), union.getObject());
+    }
+    row = (OrcStruct) rows.next(row);
+    assertEquals(new IntWritable(0), union.getObject());
+    row = (OrcStruct) rows.next(row);
+    assertEquals(new IntWritable(10), union.getObject());
+    row = (OrcStruct) rows.next(row);
+    assertEquals(new IntWritable(138), union.getObject());
+    assertEquals(false, rows.hasNext());
+    assertEquals(1.0, rows.getProgress(), 0.00001);
+    assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
+    rows.seekToRow(1);
+    row = (OrcStruct) rows.next(row);
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+        row.getFieldValue(0));
+    assertEquals(1, union.getTag());
+    assertEquals(new Text("hello"), union.getObject());
+    assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")), row.getFieldValue(2));
+    rows.close();
+  }
+
+  /**
+   * Read and write a randomly generated snappy file.
+   * @throws Exception
+   */
+  @Test
+  public void testSnappy() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(100));
+    Random rand = new Random(12);
+    for(int i=0; i < 10000; ++i) {
+      writer.addRow(new InnerStruct(rand.nextInt(),
+          Integer.toHexString(rand.nextInt())));
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    RecordReader rows = reader.rows();
+    rand = new Random(12);
+    OrcStruct row = null;
+    for(int i=0; i < 10000; ++i) {
+      assertEquals(true, rows.hasNext());
+      row = (OrcStruct) rows.next(row);
+      assertEquals(rand.nextInt(), ((IntWritable) row.getFieldValue(0)).get());
+      assertEquals(Integer.toHexString(rand.nextInt()),
+          row.getFieldValue(1).toString());
+    }
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  /**
+   * Read and write a randomly generated snappy file with the row index disabled.
+   * @throws Exception
+   */
+  @Test
+  public void testWithoutIndex() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(5000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(1000)
+                                         .rowIndexStride(0));
+    Random rand = new Random(24);
+    for(int i=0; i < 10000; ++i) {
+      InnerStruct row = new InnerStruct(rand.nextInt(),
+          Integer.toBinaryString(rand.nextInt()));
+      for(int j=0; j< 5; ++j) {
+        writer.addRow(row);
+      }
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(50000, reader.getNumberOfRows());
+    assertEquals(0, reader.getRowIndexStride());
+    StripeInformation stripe = reader.getStripes().iterator().next();
+    assertEquals(true, stripe.getDataLength() != 0);
+    assertEquals(0, stripe.getIndexLength());
+    RecordReader rows = reader.rows();
+    rand = new Random(24);
+    OrcStruct row = null;
+    for(int i=0; i < 10000; ++i) {
+      int intVal = rand.nextInt();
+      String strVal = Integer.toBinaryString(rand.nextInt());
+      for(int j=0; j < 5; ++j) {
+        assertEquals(true, rows.hasNext());
+        row = (OrcStruct) rows.next(row);
+        assertEquals(intVal, ((IntWritable) row.getFieldValue(0)).get());
+        assertEquals(strVal, row.getFieldValue(1).toString());
+      }
+    }
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  @Test
+  public void testSeek() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(200000)
+                                         .bufferSize(65536)
+                                         .rowIndexStride(1000));
+    Random rand = new Random(42);
+    final int COUNT=32768;
+    long[] intValues= new long[COUNT];
+    double[] doubleValues = new double[COUNT];
+    String[] stringValues = new String[COUNT];
+    BytesWritable[] byteValues = new BytesWritable[COUNT];
+    String[] words = new String[128];
+    for(int i=0; i < words.length; ++i) {
+      words[i] = Integer.toHexString(rand.nextInt());
+    }
+    for(int i=0; i < COUNT/2; ++i) {
+      intValues[2*i] = rand.nextLong();
+      intValues[2*i+1] = intValues[2*i];
+      stringValues[2*i] = words[rand.nextInt(words.length)];
+      stringValues[2*i+1] = stringValues[2*i];
+    }
+    for(int i=0; i < COUNT; ++i) {
+      doubleValues[i] = rand.nextDouble();
+      byte[] buf = new byte[20];
+      rand.nextBytes(buf);
+      byteValues[i] = new BytesWritable(buf);
+    }
+    for(int i=0; i < COUNT; ++i) {
+      writer.addRow(createRandomRow(intValues, doubleValues, stringValues,
+          byteValues, words, i));
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(COUNT, reader.getNumberOfRows());
+    RecordReader rows = reader.rows();
+    OrcStruct row = null;
+    for(int i=COUNT-1; i >= 0; --i) {
+      rows.seekToRow(i);
+      row = (OrcStruct) rows.next(row);
+      BigRow expected = createRandomRow(intValues, doubleValues,
+          stringValues, byteValues, words, i);
+      assertEquals(expected.boolean1.booleanValue(),
+          ((BooleanWritable) row.getFieldValue(0)).get());
+      assertEquals(expected.byte1.byteValue(),
+          ((ByteWritable) row.getFieldValue(1)).get());
+      assertEquals(expected.short1.shortValue(),
+          ((ShortWritable) row.getFieldValue(2)).get());
+      assertEquals(expected.int1.intValue(),
+          ((IntWritable) row.getFieldValue(3)).get());
+      assertEquals(expected.long1.longValue(),
+          ((LongWritable) row.getFieldValue(4)).get());
+      assertEquals(expected.float1,
+          ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
+      assertEquals(expected.double1,
+          ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
+      assertEquals(expected.bytes1, row.getFieldValue(7));
+      assertEquals(expected.string1, row.getFieldValue(8));
+      List<InnerStruct> expectedList = expected.middle.list;
+      List<OrcStruct> actualList =
+          (List<OrcStruct>) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
+      compareList(expectedList, actualList);
+      compareList(expected.list, (List<OrcStruct>) row.getFieldValue(10));
+    }
+    rows.close();
+    Iterator<StripeInformation> stripeIterator =
+      reader.getStripes().iterator();
+    long offsetOfStripe2 = 0;
+    long offsetOfStripe4 = 0;
+    long lastRowOfStripe2 = 0;
+    for(int i = 0; i < 5; ++i) {
+      StripeInformation stripe = stripeIterator.next();
+      if (i < 2) {
+        lastRowOfStripe2 += stripe.getNumberOfRows();
+      } else if (i == 2) {
+        offsetOfStripe2 = stripe.getOffset();
+        lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
+      } else if (i == 4) {
+        offsetOfStripe4 = stripe.getOffset();
+      }
+    }
+    boolean[] columns = new boolean[reader.getStatistics().length];
+    columns[5] = true; // long column
+    columns[9] = true; // text column
+    rows = reader.rowsOptions(new Reader.Options()
+        .range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2)
+        .include(columns));
+    rows.seekToRow(lastRowOfStripe2);
+    for(int i = 0; i < 2; ++i) {
+      row = (OrcStruct) rows.next(row);
+      BigRow expected = createRandomRow(intValues, doubleValues,
+                                        stringValues, byteValues, words,
+                                        (int) (lastRowOfStripe2 + i));
+
+      assertEquals(expected.long1.longValue(),
+          ((LongWritable) row.getFieldValue(4)).get());
+      assertEquals(expected.string1, row.getFieldValue(8));
+    }
+    rows.close();
+  }
+
+  @Test
+  public void testZeroCopySeek() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(200000)
+                                         .bufferSize(65536)
+                                         .rowIndexStride(1000));
+    Random rand = new Random(42);
+    final int COUNT=32768;
+    long[] intValues= new long[COUNT];
+    double[] doubleValues = new double[COUNT];
+    String[] stringValues = new String[COUNT];
+    BytesWritable[] byteValues = new BytesWritable[COUNT];
+    String[] words = new String[128];
+    for(int i=0; i < words.length; ++i) {
+      words[i] = Integer.toHexString(rand.nextInt());
+    }
+    for(int i=0; i < COUNT/2; ++i) {
+      intValues[2*i] = rand.nextLong();
+      intValues[2*i+1] = intValues[2*i];
+      stringValues[2*i] = words[rand.nextInt(words.length)];
+      stringValues[2*i+1] = stringValues[2*i];
+    }
+    for(int i=0; i < COUNT; ++i) {
+      doubleValues[i] = rand.nextDouble();
+      byte[] buf = new byte[20];
+      rand.nextBytes(buf);
+      byteValues[i] = new BytesWritable(buf);
+    }
+    for(int i=0; i < COUNT; ++i) {
+      writer.addRow(createRandomRow(intValues, doubleValues, stringValues,
+          byteValues, words, i));
+    }
+    writer.close();
+    writer = null;
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(COUNT, reader.getNumberOfRows());
+    /* enable zero copy record reader on the conf the reader already references */
+    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_ORC_ZEROCOPY, true);
+    RecordReader rows = reader.rows();
+    /* all tests are identical to the other seek() tests */
+    OrcStruct row = null;
+    for(int i=COUNT-1; i >= 0; --i) {
+      rows.seekToRow(i);
+      row = (OrcStruct) rows.next(row);
+      BigRow expected = createRandomRow(intValues, doubleValues,
+          stringValues, byteValues, words, i);
+      assertEquals(expected.boolean1.booleanValue(),
+          ((BooleanWritable) row.getFieldValue(0)).get());
+      assertEquals(expected.byte1.byteValue(),
+          ((ByteWritable) row.getFieldValue(1)).get());
+      assertEquals(expected.short1.shortValue(),
+          ((ShortWritable) row.getFieldValue(2)).get());
+      assertEquals(expected.int1.intValue(),
+          ((IntWritable) row.getFieldValue(3)).get());
+      assertEquals(expected.long1.longValue(),
+          ((LongWritable) row.getFieldValue(4)).get());
+      assertEquals(expected.float1.floatValue(),
+          ((FloatWritable) row.getFieldValue(5)).get(), 0.0001);
+      assertEquals(expected.double1.doubleValue(),
+          ((DoubleWritable) row.getFieldValue(6)).get(), 0.0001);
+      assertEquals(expected.bytes1, row.getFieldValue(7));
+      assertEquals(expected.string1, row.getFieldValue(8));
+      List<InnerStruct> expectedList = expected.middle.list;
+      List<OrcStruct> actualList =
+          (List) ((OrcStruct) row.getFieldValue(9)).getFieldValue(0);
+      compareList(expectedList, actualList);
+      compareList(expected.list, (List) row.getFieldValue(10));
+    }
+    rows.close();
+    Iterator<StripeInformation> stripeIterator =
+      reader.getStripes().iterator();
+    long offsetOfStripe2 = 0;
+    long offsetOfStripe4 = 0;
+    long lastRowOfStripe2 = 0;
+    for(int i = 0; i < 5; ++i) {
+      StripeInformation stripe = stripeIterator.next();
+      if (i < 2) {
+        lastRowOfStripe2 += stripe.getNumberOfRows();
+      } else if (i == 2) {
+        offsetOfStripe2 = stripe.getOffset();
+        lastRowOfStripe2 += stripe.getNumberOfRows() - 1;
+      } else if (i == 4) {
+        offsetOfStripe4 = stripe.getOffset();
+      }
+    }
+    boolean[] columns = new boolean[reader.getStatistics().length];
+    columns[5] = true; // long column
+    columns[9] = true; // text column
+    /* use zero copy record reader */
+    rows = reader.rowsOptions(new Reader.Options()
+        .range(offsetOfStripe2, offsetOfStripe4 - offsetOfStripe2)
+        .include(columns));
+    rows.seekToRow(lastRowOfStripe2);
+    for(int i = 0; i < 2; ++i) {
+      row = (OrcStruct) rows.next(row);
+      BigRow expected = createRandomRow(intValues, doubleValues,
+                                        stringValues, byteValues, words,
+                                        (int) (lastRowOfStripe2 + i));
+
+      assertEquals(expected.long1.longValue(),
+          ((LongWritable) row.getFieldValue(4)).get());
+      assertEquals(expected.string1, row.getFieldValue(8));
+    }
+    rows.close();
+  }
+
+  private void compareInner(InnerStruct expect,
+                            OrcStruct actual) throws Exception {
+    if (expect == null || actual == null) {
+      assertEquals(null, expect);
+      assertEquals(null, actual);
+    } else {
+      assertEquals(expect.int1, ((IntWritable) actual.getFieldValue(0)).get());
+      assertEquals(expect.string1, actual.getFieldValue(1));
+    }
+  }
+
+  private void compareList(List<InnerStruct> expect,
+                           List<OrcStruct> actual) throws Exception {
+    assertEquals(expect.size(), actual.size());
+    for(int j=0; j < expect.size(); ++j) {
+      compareInner(expect.get(j), actual.get(j));
+    }
+  }
+
+  private BigRow createRandomRow(long[] intValues, double[] doubleValues,
+                                 String[] stringValues,
+                                 BytesWritable[] byteValues,
+                                 String[] words, int i) {
+    InnerStruct inner = new InnerStruct((int) intValues[i], stringValues[i]);
+    InnerStruct inner2 = new InnerStruct((int) (intValues[i] >> 32),
+        words[i % words.length] + "-x");
+    return new BigRow((intValues[i] & 1) == 0, (byte) intValues[i],
+        (short) intValues[i], (int) intValues[i], intValues[i],
+        (float) doubleValues[i], doubleValues[i], byteValues[i],stringValues[i],
+        new MiddleStruct(inner, inner2), list(), map(inner,inner2));
+  }
+
+  private static class MyMemoryManager extends MemoryManager {
+    final long totalSpace;
+    double rate;
+    Path path = null;
+    long lastAllocation = 0;
+    int rows = 0;
+    MemoryManager.Callback callback;
+
+    MyMemoryManager(Configuration conf, long totalSpace, double rate) {
+      super(conf);
+      this.totalSpace = totalSpace;
+      this.rate = rate;
+    }
+
+    @Override
+    void addWriter(Path path, long requestedAllocation,
+                   MemoryManager.Callback callback) {
+      this.path = path;
+      this.lastAllocation = requestedAllocation;
+      this.callback = callback;
+    }
+
+    @Override
+    synchronized void removeWriter(Path path) {
+      this.path = null;
+      this.lastAllocation = 0;
+    }
+
+    @Override
+    long getTotalMemoryPool() {
+      return totalSpace;
+    }
+
+    @Override
+    double getAllocationScale() {
+      return rate;
+    }
+
+    @Override
+    void addedRow() throws IOException {
+      if (++rows % 100 == 0) {
+        callback.checkMemory(rate);
+      }
+    }
+  }
+
+  @Test
+  public void testMemoryManagementV11() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .stripeSize(50000)
+                                         .bufferSize(100)
+                                         .rowIndexStride(0)
+                                         .memory(memory)
+                                         .version(Version.V_0_11));
+    assertEquals(testFilePath, memory.path);
+    for(int i=0; i < 2500; ++i) {
+      writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
+    }
+    writer.close();
+    assertEquals(null, memory.path);
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    int i = 0;
+    for(StripeInformation stripe: reader.getStripes()) {
+      i += 1;
+      assertTrue("stripe " + i + " is too long at " + stripe.getDataLength(),
+          stripe.getDataLength() < 5000);
+    }
+    assertEquals(25, i);
+    assertEquals(2500, reader.getNumberOfRows());
+  }
+
+  @Test
+  public void testMemoryManagementV12() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .stripeSize(50000)
+                                         .bufferSize(100)
+                                         .rowIndexStride(0)
+                                         .memory(memory)
+                                         .version(Version.V_0_12));
+    assertEquals(testFilePath, memory.path);
+    for(int i=0; i < 2500; ++i) {
+      writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
+    }
+    writer.close();
+    assertEquals(null, memory.path);
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    int i = 0;
+    for(StripeInformation stripe: reader.getStripes()) {
+      i += 1;
+      assertTrue("stripe " + i + " is too long at " + stripe.getDataLength(),
+          stripe.getDataLength() < 5000);
+    }
+    // with HIVE-7832, the dictionaries will be disabled after writing the first
+    // stripe as there are too many distinct values. Hence only 3 stripes as
+    // compared to 25 stripes in version 0.11 (above test case)
+    assertEquals(3, i);
+    assertEquals(2500, reader.getNumberOfRows());
+  }
+
+  @Test
+  public void testPredicatePushdown() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
+        400000L, CompressionKind.NONE, 500, 1000);
+    for(int i=0; i < 3500; ++i) {
+      writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(testFilePath,
+        OrcFile.readerOptions(conf).filesystem(fs));
+    assertEquals(3500, reader.getNumberOfRows());
+
+    SearchArgument sarg = SearchArgumentFactory.newBuilder()
+        .startAnd()
+          .startNot()
+             .lessThan("int1", 300000)
+          .end()
+          .lessThan("int1", 600000)
+        .end()
+        .build();
+    RecordReader rows = reader.rowsOptions(new Reader.Options()
+        .range(0L, Long.MAX_VALUE)
+        .include(new boolean[]{true, true, true})
+        .searchArgument(sarg, new String[]{null, "int1", "string1"}));
+    assertEquals(1000L, rows.getRowNumber());
+    OrcStruct row = null;
+    for(int i=1000; i < 2000; ++i) {
+      assertTrue(rows.hasNext());
+      row = (OrcStruct) rows.next(row);
+      assertEquals(300 * i, ((IntWritable) row.getFieldValue(0)).get());
+      assertEquals(Integer.toHexString(10*i), row.getFieldValue(1).toString());
+    }
+    assertTrue(!rows.hasNext());
+    assertEquals(3500, rows.getRowNumber());
+
+    // look through the file with no rows selected
+    sarg = SearchArgumentFactory.newBuilder()
+        .startAnd()
+          .lessThan("int1", 0)
+        .end()
+        .build();
+    rows = reader.rowsOptions(new Reader.Options()
+        .range(0L, Long.MAX_VALUE)
+        .include(new boolean[]{true, true, true})
+        .searchArgument(sarg, new String[]{null, "int1", "string1"}));
+    assertEquals(3500L, rows.getRowNumber());
+    assertTrue(!rows.hasNext());
+
+    // select first 100 and last 100 rows
+    sarg = SearchArgumentFactory.newBuilder()
+        .startOr()
+          .lessThan("int1", 300 * 100)
+          .startNot()
+            .lessThan("int1", 300 * 3400)
+          .end()
+        .end()
+        .build();
+    rows = reader.rowsOptions(new Reader.Options()
+        .range(0L, Long.MAX_VALUE)
+        .include(new boolean[]{true, true, true})
+        .searchArgument(sarg, new String[]{null, "int1", "string1"}));
+    row = null;
+    for(int i=0; i < 1000; ++i) {
+      assertTrue(rows.hasNext());
+      assertEquals(i, rows.getRowNumber()

<TRUNCATED>
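
The predicate pushdown test is cut off above, but the SearchArgument pattern it
exercises deserves a standalone illustration. Below is a minimal sketch that
mirrors the test's own calls; the reader, the 3500-row file, and the row-index
stride of 1000 are assumed to be set up exactly as in testPredicatePushdown:

    // NOT (int1 < 300000) AND (int1 < 600000)  =>  300000 <= int1 < 600000
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
          .startNot()
            .lessThan("int1", 300000)
          .end()
          .lessThan("int1", 600000)
        .end()
        .build();
    // Row groups whose column statistics cannot satisfy the predicate are
    // skipped wholesale, so the reader starts at row 1000 (int1 == 300 * 1000).
    RecordReader rows = reader.rowsOptions(new Reader.Options()
        .range(0L, Long.MAX_VALUE)
        .include(new boolean[]{true, true, true})
        .searchArgument(sarg, new String[]{null, "int1", "string1"}));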

[10/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
new file mode 100644
index 0000000..707f574
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+import java.sql.Timestamp;
+import java.util.Arrays;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for three input expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second and third are constant timestamp values.
+ */
+public abstract class IfExprTimestampScalarScalarBase extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column;
+  private Timestamp arg2Scalar;
+  private Timestamp arg3Scalar;
+  private int outputColumn;
+
+  public IfExprTimestampScalarScalarBase(int arg1Column, Timestamp arg2Scalar, Timestamp arg3Scalar,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprTimestampScalarScalarBase() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = false; // output is a scalar, which we know is non-null
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+    } else if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+          outputIsNull[i] = false;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+        }
+        Arrays.fill(outputIsNull, 0, n, false);
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+}

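The evaluate() above follows the standard vectorized IF pattern: short-circuit a repeating condition vector, then split on whether the condition column carries nulls, honoring the selection vector in either case. A minimal self-contained sketch of that control flow (plain Java with primitive arrays; none of these names come from the patch):

    import java.util.Arrays;

    public class IfScalarScalarSketch {
        // IF(cond, a, b) over a batch, honoring an optional selection vector.
        static void ifExpr(long[] cond, boolean condRepeating,
                           long a, long b,
                           int[] sel, boolean selectedInUse, int n,
                           long[] out) {
            if (n == 0) {
                return; // nothing selected
            }
            if (condRepeating) {
                // One test decides the whole batch.
                Arrays.fill(out, 0, n, cond[0] == 1 ? a : b);
            } else if (selectedInUse) {
                for (int j = 0; j != n; j++) {
                    int i = sel[j];
                    out[i] = cond[i] == 1 ? a : b;
                }
            } else {
                for (int i = 0; i != n; i++) {
                    out[i] = cond[i] == 1 ? a : b;
                }
            }
        }

        public static void main(String[] args) {
            long[] cond = {1, 0, 1, 0};
            long[] out = new long[4];
            ifExpr(cond, false, 10L, 20L, null, false, 4, out);
            System.out.println(Arrays.toString(out)); // [10, 20, 10, 20]
        }
    }
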
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
index 2401abd..eb493bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
@@ -19,10 +19,14 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 
 /**
  * Utility functions to handle null propagation.
@@ -54,6 +58,81 @@ public class NullUtil {
     }
   }
 
+  /**
+   * Set the data value for all NULL entries to the designated NULL_VALUE.
+   */
+  public static void setNullDataEntriesBytes(
+      BytesColumnVector v, boolean selectedInUse, int[] sel, int n) {
+    if (v.noNulls) {
+      return;
+    } else if (v.isRepeating && v.isNull[0]) {
+      v.vector[0] = null;
+    } else if (selectedInUse) {
+      for (int j = 0; j != n; j++) {
+        int i = sel[j];
+        if(v.isNull[i]) {
+          v.vector[i] = null;
+        }
+      }
+    } else {
+      for (int i = 0; i != n; i++) {
+        if(v.isNull[i]) {
+          v.vector[i] = null;
+        }
+      }
+    }
+  }
+
+  /**
+   * Set the data value for all NULL entries to the designated NULL_VALUE.
+   */
+  public static void setNullDataEntriesTimestamp(
+      TimestampColumnVector v, boolean selectedInUse, int[] sel, int n) {
+    if (v.noNulls) {
+      return;
+    } else if (v.isRepeating && v.isNull[0]) {
+      v.setNullValue(0);
+    } else if (selectedInUse) {
+      for (int j = 0; j != n; j++) {
+        int i = sel[j];
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    } else {
+      for (int i = 0; i != n; i++) {
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    }
+  }
+
+  /**
+   * Set the data value for all NULL entries to the designated NULL_VALUE.
+   */
+  public static void setNullDataEntriesIntervalDayTime(
+      IntervalDayTimeColumnVector v, boolean selectedInUse, int[] sel, int n) {
+    if (v.noNulls) {
+      return;
+    } else if (v.isRepeating && v.isNull[0]) {
+      v.setNullValue(0);
+    } else if (selectedInUse) {
+      for (int j = 0; j != n; j++) {
+        int i = sel[j];
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    } else {
+      for (int i = 0; i != n; i++) {
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    }
+  }
+
   // for use by Column-Scalar and Scalar-Column arithmetic for null propagation
   public static void setNullOutputEntriesColScalar(
       ColumnVector v, boolean selectedInUse, int[] sel, int n) {
@@ -62,8 +141,11 @@ public class NullUtil {
       // No need to set null data entries because the input NaN values
       // will automatically propagate to the output.
       return;
+    } else if (v instanceof LongColumnVector) {
+      setNullDataEntriesLong((LongColumnVector) v, selectedInUse, sel, n);
+    } else if (v instanceof TimestampColumnVector) {
+      setNullDataEntriesTimestamp((TimestampColumnVector) v, selectedInUse, sel, n);
     }
-    setNullDataEntriesLong((LongColumnVector) v, selectedInUse, sel, n);
   }
 
   /**

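The new setNullDataEntries* helpers all share one job: give every NULL slot a well-defined data value so later arithmetic on the column cannot trip over leftover garbage. A self-contained sketch of the same pattern over a primitive long column (the names here are illustrative, not from the patch):

    public class NullDataSketch {
        static final long NULL_VALUE = 1L; // designated placeholder for NULL slots

        static void setNullDataEntries(long[] vector, boolean[] isNull, boolean noNulls,
                                       boolean isRepeating, boolean selectedInUse,
                                       int[] sel, int n) {
            if (noNulls) {
                return; // nothing to scrub
            } else if (isRepeating && isNull[0]) {
                vector[0] = NULL_VALUE;
            } else if (selectedInUse) {
                for (int j = 0; j != n; j++) {
                    int i = sel[j];
                    if (isNull[i]) {
                        vector[i] = NULL_VALUE;
                    }
                }
            } else {
                for (int i = 0; i != n; i++) {
                    if (isNull[i]) {
                        vector[i] = NULL_VALUE;
                    }
                }
            }
        }
    }
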
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java
new file mode 100644
index 0000000..bc09a3a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampColumnInList.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+import java.util.HashSet;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Output a boolean value indicating if a column is IN a list of constants.
+ */
+public class TimestampColumnInList extends VectorExpression implements ITimestampInExpr {
+  private static final long serialVersionUID = 1L;
+  private int inputCol;
+  private Timestamp[] inListValues;
+  private int outputColumn;
+
+  // The set object containing the IN list.
+  private transient HashSet<Timestamp> inSet;
+
+  public TimestampColumnInList() {
+    super();
+    inSet = null;
+  }
+
+  /**
+   * After construction you must call setInListValues() to add the values to the IN set.
+   */
+  public TimestampColumnInList(int colNum, int outputColumn) {
+    this.inputCol = colNum;
+    this.outputColumn = outputColumn;
+    inSet = null;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    if (inSet == null) {
+      inSet = new HashSet<Timestamp>(inListValues.length);
+      for (Timestamp val : inListValues) {
+        inSet.add(val);
+      }
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[inputCol];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector.noNulls;
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero
+        // Repeating property will not change.
+        outputVector[0] = inSet.contains(inputColVector.asScratchTimestamp(0)) ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inSet.contains(inputColVector.asScratchTimestamp(i)) ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inSet.contains(inputColVector.asScratchTimestamp(i)) ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero
+        // Repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = inSet.contains(inputColVector.asScratchTimestamp(0)) ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outNulls[i] = nullPos[i];
+          if (!nullPos[i]) {
+            outputVector[i] = inSet.contains(inputColVector.asScratchTimestamp(i)) ? 1 : 0;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = inSet.contains(inputColVector.asScratchTimestamp(i)) ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+
+    // This VectorExpression (IN) is a special case, so don't return a descriptor.
+    return null;
+  }
+
+  public void setInListValues(Timestamp[] a) {
+    this.inListValues = a;
+  }
+}

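The IN evaluation above builds a HashSet<Timestamp> lazily on first use. java.sql.Timestamp includes the nanosecond field in equals() and hashCode(), so full-precision values match exactly. A small standalone illustration of that behavior:

    import java.sql.Timestamp;
    import java.util.HashSet;

    public class TimestampInListSketch {
        public static void main(String[] args) {
            Timestamp[] inList = {
                Timestamp.valueOf("2016-03-01 00:00:00.000000001"),
                Timestamp.valueOf("2016-03-02 12:30:00")
            };
            HashSet<Timestamp> inSet = new HashSet<>(inList.length);
            for (Timestamp t : inList) {
                inSet.add(t);
            }
            Timestamp probe = Timestamp.valueOf("2016-03-01 00:00:00.000000001");
            System.out.println(inSet.contains(probe)); // true: nanos participate in equals/hashCode
            probe.setNanos(2);
            System.out.println(inSet.contains(probe)); // false: differs by one nanosecond
        }
    }
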
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
new file mode 100644
index 0000000..052d57c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * This is a superclass for unary timestamp functions returning strings that operate directly on the
+ * input and set the output.
+ */
+abstract public class TimestampToStringUnaryUDF extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  int inputColumn;
+  int outputColumn;
+
+  public TimestampToStringUnaryUDF(int inputColumn, int outputColumn) {
+    this.inputColumn = inputColumn;
+    this.outputColumn = outputColumn;
+  }
+
+  public TimestampToStringUnaryUDF() {
+    super();
+  }
+
+  abstract protected void func(BytesColumnVector outV, TimestampColumnVector inV, int i);
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[inputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    BytesColumnVector outV = (BytesColumnVector) batch.cols[outputColumn];
+    outV.initBuffer();
+
+    if (n == 0) {
+      // Nothing to do
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      outV.noNulls = true;
+      if (inputColVector.isRepeating) {
+        outV.isRepeating = true;
+        func(outV, inputColVector, 0);
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          func(outV, inputColVector, i);
+        }
+        outV.isRepeating = false;
+      } else {
+        for(int i = 0; i != n; i++) {
+          func(outV, inputColVector, i);
+        }
+        outV.isRepeating = false;
+      }
+    } else {
+
+      // Handle case with nulls. Don't do function if the value is null,
+      // because the data may be undefined for a null value.
+      outV.noNulls = false;
+      if (inputColVector.isRepeating) {
+        outV.isRepeating = true;
+        outV.isNull[0] = inputColVector.isNull[0];
+        if (!inputColVector.isNull[0]) {
+          func(outV, inputColVector, 0);
+        }
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inputColVector.isNull[i];
+          if (!inputColVector.isNull[i]) {
+            func(outV, inputColVector, i);
+          }
+        }
+        outV.isRepeating = false;
+      } else {
+        System.arraycopy(inputColVector.isNull, 0, outV.isNull, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!inputColVector.isNull[i]) {
+            func(outV, inputColVector, i);
+          }
+        }
+        outV.isRepeating = false;
+      }
+    }
+  }
+
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  public int getInputColumn() {
+    return inputColumn;
+  }
+
+  public void setInputColumn(int inputColumn) {
+    this.inputColumn = inputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "String";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.TIMESTAMP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
\ No newline at end of file

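TimestampToStringUnaryUDF is a template: subclasses supply func() while the base class owns iteration and null handling. A minimal sketch of that shape, detached from the Hive vector classes (the subclass and method names are made up for illustration):

    import java.sql.Timestamp;

    abstract class TimestampToStringSketch {
        // Subclasses define the per-row conversion; the base class drives iteration.
        abstract String func(Timestamp in);

        String[] evaluate(Timestamp[] batch, int n) {
            String[] out = new String[n];
            for (int i = 0; i != n; i++) {
                out[i] = batch[i] == null ? null : func(batch[i]); // skip NULLs
            }
            return out;
        }
    }

    class ToIsoString extends TimestampToStringSketch {
        @Override
        String func(Timestamp in) {
            return in.toString(); // yyyy-mm-dd hh:mm:ss.fffffffff
        }
    }
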
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java
index c0e4cf0..8fca8a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java
@@ -144,6 +144,8 @@ public abstract class VectorExpression implements Serializable {
     b.append(this.getClass().getSimpleName());
     b.append("[");
     b.append(this.getOutputColumn());
+    b.append(":");
+    b.append(this.getOutputType());
     b.append("]");
     if (childExpressions != null) {
       b.append("(");

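With the extra append, a vector expression now prints its output type next to its output column, so a plan annotation that previously rendered as ClassName[4] renders as something like ClassName[4:timestamp] (the class name and column number here only illustrate the pattern, they are not from the patch).
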
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
index d91b880..85dacd7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
@@ -18,12 +18,16 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.io.Writable;
 
 /**
  * Interface used to create Writable objects from vector expression primitives.
@@ -37,6 +41,10 @@ public interface VectorExpressionWriter {
   Object writeValue(byte[] value, int start, int length) throws HiveException;
   Object writeValue(HiveDecimalWritable value) throws HiveException;
   Object writeValue(HiveDecimal value) throws HiveException;
+  Object writeValue(TimestampWritable value) throws HiveException;
+  Object writeValue(Timestamp value) throws HiveException;
+  Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException;
+  Object writeValue(HiveIntervalDayTime value) throws HiveException;
   Object setValue(Object row, ColumnVector column, int columnRow) throws HiveException;
   Object initValue(Object ost) throws HiveException;
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
index d57a767..c20bc68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
@@ -162,6 +162,66 @@ public final class VectorExpressionWriterFactory {
     public Object setValue(Object field, HiveDecimal value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
+
+    /**
+     * The base implementation must be overridden by the Timestamp specialization
+     */
+    @Override
+    public Object writeValue(Timestamp value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Timestamp specialization
+     */
+    @Override
+    public Object writeValue(TimestampWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Timestamp specialization
+     */
+    public Object setValue(Object field, TimestampWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Timestamp specialization
+     */
+    public Object setValue(Object field, Timestamp value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the HiveIntervalDayTime specialization
+     */
+    @Override
+    public Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the HiveIntervalDayTime specialization
+     */
+    @Override
+    public Object writeValue(HiveIntervalDayTime value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the HiveIntervalDayTime specialization
+     */
+    public Object setValue(Object field, HiveIntervalDayTimeWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the HiveIntervalDayTime specialization
+     */
+    public Object setValue(Object field, HiveIntervalDayTime value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
   }
 
   /**
@@ -366,6 +426,126 @@ public final class VectorExpressionWriterFactory {
     }
   }
 
+  /**
+   * Specialized writer for TimestampColumnVector. Will throw cast exception
+   * if the wrong vector column is used.
+   */
+  private static abstract class VectorExpressionWriterTimestamp extends VectorExpressionWriterBase {
+    @Override
+    public Object writeValue(ColumnVector column, int row) throws HiveException {
+      TimestampColumnVector dcv = (TimestampColumnVector) column;
+      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
+      if (timestampWritable == null) {
+        timestampWritable = new TimestampWritable();
+        dcv.setScratchWritable(timestampWritable);
+      }
+      if (dcv.noNulls && !dcv.isRepeating) {
+        return writeValue(TimestampUtils.timestampColumnVectorWritable(dcv, row, timestampWritable));
+      } else if (dcv.noNulls && dcv.isRepeating) {
+        return writeValue(TimestampUtils.timestampColumnVectorWritable(dcv, 0, timestampWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && !dcv.isNull[row]) {
+        return writeValue(TimestampUtils.timestampColumnVectorWritable(dcv, row, timestampWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && !dcv.isNull[0]) {
+        return writeValue(TimestampUtils.timestampColumnVectorWritable(dcv, 0, timestampWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && dcv.isNull[0]) {
+        return null;
+      } else if (!dcv.noNulls && !dcv.isRepeating && dcv.isNull[row]) {
+        return null;
+      }
+      throw new HiveException(
+          String.format(
+              "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
+              row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
+    }
+
+    @Override
+    public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
+      TimestampColumnVector dcv = (TimestampColumnVector) column;
+      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
+      if (timestampWritable == null) {
+        timestampWritable = new TimestampWritable();
+        dcv.setScratchWritable(timestampWritable);
+      }
+      if (dcv.noNulls && !dcv.isRepeating) {
+        return setValue(field, TimestampUtils.timestampColumnVectorWritable(dcv, row, timestampWritable));
+      } else if (dcv.noNulls && dcv.isRepeating) {
+        return setValue(field, TimestampUtils.timestampColumnVectorWritable(dcv, 0, timestampWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && !dcv.isNull[row]) {
+        return setValue(field, TimestampUtils.timestampColumnVectorWritable(dcv, row, timestampWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && dcv.isNull[row]) {
+        return null;
+      } else if (!dcv.noNulls && dcv.isRepeating && !dcv.isNull[0]) {
+        return setValue(field, TimestampUtils.timestampColumnVectorWritable(dcv, 0, timestampWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && dcv.isNull[0]) {
+        return null;
+      }
+      throw new HiveException(
+          String.format(
+              "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
+              row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
+    }
+  }
+
+  /**
+   * Specialized writer for IntervalDayTimeColumnVector. Will throw cast exception
+   * if the wrong vector column is used.
+   */
+  private static abstract class VectorExpressionWriterIntervalDayTime extends VectorExpressionWriterBase {
+    @Override
+    public Object writeValue(ColumnVector column, int row) throws HiveException {
+      IntervalDayTimeColumnVector dcv = (IntervalDayTimeColumnVector) column;
+      HiveIntervalDayTimeWritable intervalDayTimeWritable = (HiveIntervalDayTimeWritable) dcv.getScratchWritable();
+      if (intervalDayTimeWritable == null) {
+        intervalDayTimeWritable = new HiveIntervalDayTimeWritable();
+        dcv.setScratchWritable(intervalDayTimeWritable);
+      }
+      if (dcv.noNulls && !dcv.isRepeating) {
+        return writeValue(TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, row, intervalDayTimeWritable));
+      } else if (dcv.noNulls && dcv.isRepeating) {
+        return writeValue(TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, 0, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && !dcv.isNull[row]) {
+        return writeValue(TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, row, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && !dcv.isNull[0]) {
+        return writeValue(TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, 0, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && dcv.isNull[0]) {
+        return null;
+      } else if (!dcv.noNulls && !dcv.isRepeating && dcv.isNull[row]) {
+        return null;
+      }
+      throw new HiveException(
+          String.format(
+              "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
+              row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
+    }
+
+    @Override
+    public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
+      IntervalDayTimeColumnVector dcv = (IntervalDayTimeColumnVector) column;
+      HiveIntervalDayTimeWritable intervalDayTimeWritable = (HiveIntervalDayTimeWritable) dcv.getScratchWritable();
+      if (intervalDayTimeWritable == null) {
+        intervalDayTimeWritable = new HiveIntervalDayTimeWritable();
+        dcv.setScratchWritable(intervalDayTimeWritable);
+      }
+      if (dcv.noNulls && !dcv.isRepeating) {
+        return setValue(field, TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, row, intervalDayTimeWritable));
+      } else if (dcv.noNulls && dcv.isRepeating) {
+        return setValue(field, TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, 0, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && !dcv.isNull[row]) {
+        return setValue(field, TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, row, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && !dcv.isRepeating && dcv.isNull[row]) {
+        return null;
+      } else if (!dcv.noNulls && dcv.isRepeating && !dcv.isNull[0]) {
+        return setValue(field, TimestampUtils.intervalDayTimeColumnVectorWritable(dcv, 0, intervalDayTimeWritable));
+      } else if (!dcv.noNulls && dcv.isRepeating && dcv.isNull[0]) {
+        return null;
+      }
+      throw new HiveException(
+          String.format(
+              "Incorrect null/repeating: row:%d noNulls:%b isRepeating:%b isNull[row]:%b isNull[0]:%b",
+              row, dcv.noNulls, dcv.isRepeating, dcv.isNull[row], dcv.isNull[0]));
+    }
+  }
+
     /**
      * Compiles the appropriate vector expression writer based on an expression info (ExprNodeDesc)
      */
@@ -514,6 +694,22 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
+      public Object setValue(Object field, TimestampWritable value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(field, value);
+      }
+
+      @Override
+      public Object setValue(Object field, Timestamp value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(field, value);
+      }
+
+      @Override
       public Object initValue(Object ignored) {
         return ((SettableHiveDecimalObjectInspector) this.objectInspector).create(
             HiveDecimal.ZERO);
@@ -560,41 +756,58 @@ public final class VectorExpressionWriterFactory {
   }
 
   private static VectorExpressionWriter genVectorExpressionWritableTimestamp(
-        SettableTimestampObjectInspector fieldObjInspector) throws HiveException {
-    return new VectorExpressionWriterLong() {
+      SettableTimestampObjectInspector fieldObjInspector) throws HiveException {
+
+    return new VectorExpressionWriterTimestamp() {
       private Object obj;
-      private Timestamp ts;
 
-      public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector)
-          throws HiveException {
+      public VectorExpressionWriter init(SettableTimestampObjectInspector objInspector) throws HiveException {
         super.init(objInspector);
-        ts = new Timestamp(0);
         obj = initValue(null);
         return this;
       }
 
       @Override
-      public Object writeValue(long value) {
-        TimestampUtils.assignTimeInNanoSec(value, ts);
-        ((SettableTimestampObjectInspector) this.objectInspector).set(obj, ts);
-        return obj;
+      public Object writeValue(TimestampWritable value) throws HiveException {
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(obj, value);
       }
 
       @Override
-      public Object setValue(Object field, long value) {
+      public Object writeValue(Timestamp value) throws HiveException {
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException {
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object writeValue(HiveIntervalDayTime value) throws HiveException {
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object setValue(Object field, TimestampWritable value) {
         if (null == field) {
           field = initValue(null);
         }
-        TimestampUtils.assignTimeInNanoSec(value, ts);
-        ((SettableTimestampObjectInspector) this.objectInspector).set(field, ts);
-        return field;
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(field, value);
+      }
+
+      @Override
+      public Object setValue(Object field, Timestamp value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return ((SettableTimestampObjectInspector) this.objectInspector).set(field, value);
       }
 
       @Override
       public Object initValue(Object ignored) {
         return ((SettableTimestampObjectInspector) this.objectInspector).create(new Timestamp(0));
       }
-   }.init(fieldObjInspector);
+    }.init(fieldObjInspector);
   }
 
   private static VectorExpressionWriter genVectorExpressionWritableIntervalYearMonth(
@@ -638,7 +851,8 @@ public final class VectorExpressionWriterFactory {
 
   private static VectorExpressionWriter genVectorExpressionWritableIntervalDayTime(
       SettableHiveIntervalDayTimeObjectInspector fieldObjInspector) throws HiveException {
-    return new VectorExpressionWriterLong() {
+
+    return new VectorExpressionWriterIntervalDayTime() {
       private Object obj;
       private HiveIntervalDayTime interval;
 
@@ -651,20 +865,33 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object writeValue(long value) {
-        DateUtils.setIntervalDayTimeTotalNanos(interval, value);
-        ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, interval);
-        return obj;
+      public Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException {
+        interval.set(value.getHiveIntervalDayTime());
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, interval);
       }
 
       @Override
-      public Object setValue(Object field, long value) {
+      public Object writeValue(HiveIntervalDayTime value) throws HiveException {
+        interval.set(value);
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, interval);
+      }
+
+      @Override
+      public Object setValue(Object field, HiveIntervalDayTimeWritable value) {
         if (null == field) {
           field = initValue(null);
         }
-        DateUtils.setIntervalDayTimeTotalNanos(interval, value);
-        ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(field, interval);
-        return field;
+        interval.set(value.getHiveIntervalDayTime());
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(field, interval);
+      }
+
+      @Override
+      public Object setValue(Object field, HiveIntervalDayTime value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        interval.set(value);
+        return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(field, interval);
       }
 
       @Override

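Both specialized writers lean on a per-vector scratch Writable, created once via getScratchWritable()/setScratchWritable() and overwritten per row, so the write path does not allocate per value. A self-contained sketch of that lazy-reuse idiom (field and method names invented for the example; java.sql.Timestamp stands in for the Writable):

    import java.sql.Timestamp;

    public class ScratchReuseSketch {
        private Object scratchWritable; // lives with the column, reused across rows

        Object scratchFor(long millis) {
            Timestamp scratch = (Timestamp) scratchWritable;
            if (scratch == null) {
                scratch = new Timestamp(0);   // allocate once
                scratchWritable = scratch;
            }
            scratch.setTime(millis);          // overwrite in place per row
            return scratch;
        }
    }
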
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
index c4a70c0..05dd93e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -164,8 +165,8 @@ public class VectorUDFDateAddColCol extends VectorExpression {
   }
 
   protected byte[] evaluateTimestamp(ColumnVector columnVector, int index, long numDays) {
-    LongColumnVector lcv = (LongColumnVector) columnVector;
-    calendar.setTimeInMillis(lcv.vector[index] / 1000000);
+    TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
+    calendar.setTimeInMillis(tcv.getTime(index));
     if (isPositive) {
       calendar.add(Calendar.DATE, (int) numDays);
     } else {

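The one-line change above is the heart of the fix: the old layout packed a timestamp into a long of nanoseconds, so dividing by 1000000 recovered milliseconds, whereas TimestampColumnVector stores the value natively and getTime(i) already returns epoch milliseconds. A quick check of the arithmetic with plain java.sql.Timestamp:

    import java.sql.Timestamp;

    public class TimestampMillisSketch {
        public static void main(String[] args) {
            Timestamp ts = Timestamp.valueOf("2016-03-01 00:00:00.123456789");
            long millis = ts.getTime();        // epoch millis, sub-millisecond part truncated
            System.out.println(millis % 1000); // 123
            System.out.println(ts.getNanos()); // 123456789 -- the precision the old long encoding had to carry
        }
    }
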
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
index 9a9c928..59ca61e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -66,6 +67,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
     /* every line below this is identical for evaluateLong & evaluateString */
     final int n = inputCol.isRepeating ? 1 : batch.size;
     int[] sel = batch.selected;
+    final boolean selectedInUse = !inputCol.isRepeating && batch.selectedInUse;
 
     if(batch.size == 0) {
       /* n != batch.size when isRepeating */
@@ -79,7 +81,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
       case DATE:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.vector[i] = evaluateDate(inputCol, i);
@@ -97,7 +99,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -207,8 +209,8 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
   }
 
   protected byte[] evaluateTimestamp(ColumnVector columnVector, int index) {
-    LongColumnVector lcv = (LongColumnVector) columnVector;
-    calendar.setTimeInMillis(lcv.vector[index] / 1000000);
+    TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
+    calendar.setTimeInMillis(tcv.getTime(index));
     if (isPositive) {
       calendar.add(Calendar.DATE, numDays);
     } else {

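The new selectedInUse flag guards a subtle interaction: when the input column repeats, n is forced to 1, and walking sel[] with that truncated n would read and write sel[0] (an arbitrary row) instead of slot 0, the only slot a repeating column actually uses. A tiny demonstration of the failure mode the guard avoids (plain arrays, illustrative only):

    public class RepeatingSelSketch {
        public static void main(String[] args) {
            int[] sel = {5, 7, 9};     // selection vector: rows 5, 7, 9 are live
            boolean isRepeating = true;
            int batchSize = 3;
            int n = isRepeating ? 1 : batchSize;

            // Wrong: with n == 1 we would touch only slot sel[0] == 5,
            // leaving slot 0 (the one a repeating column uses) stale.
            // Right: ignore sel[] whenever the input repeats.
            boolean selectedInUse = !isRepeating && true /* batch.selectedInUse */;
            int target = selectedInUse ? sel[0] : 0;
            System.out.println("writes go to slot " + target); // writes go to slot 0
        }
    }
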
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
index e0497a1..2d0a28a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.io.Text;
 
 import java.io.UnsupportedEncodingException;
+import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
@@ -38,6 +39,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
   private int colNum;
   private int outputColumn;
   private long longValue = 0;
+  private Timestamp timestampValue = null;
   private byte[] stringValue = null;
   protected boolean isPositive = true;
   private transient final Calendar calendar = Calendar.getInstance();
@@ -56,6 +58,8 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
 
     if (object instanceof Long) {
       this.longValue = (Long) object;
+    } else if (object instanceof Timestamp) {
+        this.timestampValue = (Timestamp) object;
     } else if (object instanceof byte []) {
       this.stringValue = (byte[]) object;
     }
@@ -72,6 +76,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
     /* every line below this is identical for evaluateLong & evaluateString */
     final int n = inputCol.isRepeating ? 1 : batch.size;
     int[] sel = batch.selected;
+    final boolean selectedInUse = !inputCol.isRepeating && batch.selectedInUse;
     BytesColumnVector outV = (BytesColumnVector) batch.cols[outputColumn];
 
     switch (inputTypes[0]) {
@@ -80,7 +85,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
         break;
 
       case TIMESTAMP:
-        baseDate.setTime(longValue / 1000000);
+        baseDate.setTime(timestampValue.getTime());
         break;
 
       case STRING:
@@ -91,7 +96,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
           break;
         } catch (Exception e) {
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = true;
@@ -117,7 +122,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
 
     if (inputCol.noNulls) {
       outV.noNulls = true;
-      if (batch.selectedInUse) {
+      if (selectedInUse) {
         for(int j=0; j < n; j++) {
           int i = sel[j];
           evaluate(baseDate, inputCol.vector[i], outV, i);
@@ -131,7 +136,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
       // Handle case with nulls. Don't do function if the value is null, to save time,
       // because calling the function can be expensive.
       outV.noNulls = false;
-      if (batch.selectedInUse) {
+      if (selectedInUse) {
         for(int j = 0; j < n; j++) {
           int i = sel[j];
           outV.isNull[i] = inputCol.isNull[i];

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
index 93a54ae..4edf558 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -165,29 +166,8 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
 
     switch (colType) {
       case TIMESTAMP:
-        LongColumnVector lcv = (LongColumnVector) inputColVector;
-        lcv.copySelected(batch.selectedInUse, batch.selected, batch.size, dateVector);
-        if (dateVector.isRepeating) {
-          date.setTime(dateVector.vector[0] / 1000000);
-          dateVector.vector[0] = DateWritable.dateToDays(date);
-        } else {
-          if (batch.selectedInUse) {
-            for (int j = 0; j != size; j++) {
-              int i = batch.selected[j];
-              if (!dateVector.isNull[i]) {
-                date.setTime(dateVector.vector[i] / 1000000);
-                dateVector.vector[i] = DateWritable.dateToDays(date);
-              }
-            }
-          } else {
-            for (int i = 0; i != size; i++) {
-              if (!dateVector.isNull[i]) {
-                date.setTime(dateVector.vector[i] / 1000000);
-                dateVector.vector[i] = DateWritable.dateToDays(date);
-              }
-            }
-          }
-        }
+        TimestampColumnVector tcv = (TimestampColumnVector) inputColVector;
+        copySelected(tcv, batch.selectedInUse, batch.selected, batch.size, dateVector);
         return dateVector;
 
       case STRING:
@@ -280,6 +260,73 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
     }
   }
 
+  // Copy the current object contents into the output. Only copy selected entries,
+  // as indicated by selectedInUse and the sel array.
+  public void copySelected(
+      TimestampColumnVector input, boolean selectedInUse, int[] sel, int size, LongColumnVector output) {
+
+    // Output has nulls if and only if input has nulls.
+    output.noNulls = input.noNulls;
+    output.isRepeating = false;
+
+    // Handle repeating case
+    if (input.isRepeating) {
+      output.isNull[0] = input.isNull[0];
+      output.isRepeating = true;
+
+      if (!input.isNull[0]) {
+        date.setTime(input.getTime(0));
+        output.vector[0] = DateWritable.dateToDays(date);
+      }
+      return;
+    }
+
+    // Handle normal case
+
+    // Copy data values over
+    if (input.noNulls) {
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          date.setTime(input.getTime(i));
+          output.vector[i] = DateWritable.dateToDays(date);
+        }
+      } else {
+        for (int i = 0; i < size; i++) {
+          date.setTime(input.getTime(i));
+          output.vector[i] = DateWritable.dateToDays(date);
+        }
+      }
+    } else {
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          output.isNull[i] = input.isNull[i];
+        }
+      } else {
+        System.arraycopy(input.isNull, 0, output.isNull, 0, size);
+      }
+
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          if (!input.isNull[i]) {
+            date.setTime(input.getTime(i));
+            output.vector[i] = DateWritable.dateToDays(date);
+          }
+        }
+      } else {
+        for (int i = 0; i < size; i++) {
+          if (!input.isNull[i]) {
+            date.setTime(input.getTime(i));
+            output.vector[i] = DateWritable.dateToDays(date);
+          }
+        }
+      }
+    }
+  }
+
   @Override
   public int getOutputColumn() {
     return this.outputColumn;

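The extracted copySelected() reduces each selected timestamp to whole days so the diff becomes simple integer subtraction. A standalone approximation of the conversion (unlike DateWritable.dateToDays, which accounts for the local time zone, this simplification does plain UTC arithmetic):

    public class ToDaysSketch {
        static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

        // Floor-divide epoch millis to epoch days; correct for pre-1970 values too.
        static long toEpochDaysUtc(long epochMillis) {
            return Math.floorDiv(epochMillis, MILLIS_PER_DAY);
        }

        public static void main(String[] args) {
            long a = toEpochDaysUtc(1456790400000L); // 2016-03-01T00:00:00Z
            long b = toEpochDaysUtc(1456704000000L); // 2016-02-29T00:00:00Z
            System.out.println(a - b); // 1
        }
    }
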
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
index 3df53a4..71b3887 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
@@ -22,12 +22,14 @@ import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.io.Text;
 
 import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 
@@ -37,6 +39,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
   private int colNum;
   private int outputColumn;
   private long longValue;
+  private Timestamp timestampValue;
   private byte[] stringValue;
   private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient final Text text = new Text();
@@ -50,6 +53,8 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
 
     if (object instanceof Long) {
       this.longValue = (Long) object;
+    } else if (object instanceof Timestamp) {
+      this.timestampValue = (Timestamp) object;
     } else if (object instanceof byte []) {
       this.stringValue = (byte []) object;
     }
@@ -71,6 +76,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
     /* every line below this is identical for evaluateLong & evaluateString */
     final int n = inputCol.isRepeating ? 1 : batch.size;
     int[] sel = batch.selected;
+    final boolean selectedInUse = !inputCol.isRepeating && batch.selectedInUse;
 
     if(batch.size == 0) {
       /* n != batch.size when isRepeating */
@@ -86,7 +92,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
         break;
 
       case TIMESTAMP:
-        date.setTime(longValue / 1000000);
+        date.setTime(timestampValue.getTime());
         baseDate = DateWritable.dateToDays(date);
         break;
 
@@ -99,7 +105,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
           break;
         } catch (Exception e) {
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = true;
@@ -119,7 +125,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
       case DATE:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.vector[i] = evaluateDate(inputCol, i);
@@ -133,7 +139,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -155,7 +161,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
       case TIMESTAMP:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.vector[i] = evaluateTimestamp(inputCol, i);
@@ -169,7 +175,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -193,7 +199,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
       case VARCHAR:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               evaluateString(inputCol, outV, i);
@@ -207,7 +213,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -231,8 +237,8 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
   }
 
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
-    LongColumnVector lcv = (LongColumnVector) columnVector;
-    date.setTime(lcv.vector[index] / 1000000);
+    TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
+    date.setTime(tcv.getTime(index));
     return DateWritable.dateToDays(date) - baseDate;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
index b16a21c..c733bc9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.io.Text;
 
 import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 
@@ -36,6 +38,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
   private int colNum;
   private int outputColumn;
   private long longValue;
+  private Timestamp timestampValue = null;
   private byte[] stringValue;
   private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient final Text text = new Text();
@@ -49,6 +52,8 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
 
     if (object instanceof Long) {
       this.longValue = (Long) object;
+    } else if (object instanceof Timestamp) {
+      this.timestampValue = (Timestamp) object;
     } else if (object instanceof byte []) {
       this.stringValue = (byte[]) object;
     }
@@ -70,6 +75,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
     /* every line below this is identical for evaluateLong & evaluateString */
     final int n = inputCol.isRepeating ? 1 : batch.size;
     int[] sel = batch.selected;
+    final boolean selectedInUse = !inputCol.isRepeating && batch.selectedInUse;
 
     if(batch.size == 0) {
       /* n != batch.size when isRepeating */
@@ -85,7 +91,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
         break;
 
       case TIMESTAMP:
-        date.setTime(longValue / 1000000);
+        date.setTime(timestampValue.getTime());
         baseDate = DateWritable.dateToDays(date);
         break;
 
@@ -98,7 +104,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
           break;
         } catch (Exception e) {
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = true;
@@ -118,7 +124,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
       case DATE:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.vector[i] = evaluateDate(inputCol, i);
@@ -132,7 +138,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -154,7 +160,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
       case TIMESTAMP:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               outV.vector[i] = evaluateTimestamp(inputCol, i);
@@ -168,7 +174,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -192,7 +198,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
       case VARCHAR:
         if (inputCol.noNulls) {
           outV.noNulls = true;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j=0; j < n; j++) {
               int i = sel[j];
               evaluateString(inputCol, outV, i);
@@ -206,7 +212,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
           // Handle case with nulls. Don't do function if the value is null, to save time,
           // because calling the function can be expensive.
           outV.noNulls = false;
-          if (batch.selectedInUse) {
+          if (selectedInUse) {
             for(int j = 0; j < n; j++) {
               int i = sel[j];
               outV.isNull[i] = inputCol.isNull[i];
@@ -230,8 +236,8 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
   }
 
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
-    LongColumnVector lcv = (LongColumnVector) columnVector;
-    date.setTime(lcv.vector[index] / 1000000);
+    TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
+    date.setTime(tcv.getTime(index));
     return baseDate - DateWritable.dateToDays(date);
   }
 
@@ -302,4 +308,4 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
             VectorExpressionDescriptor.InputExpressionType.COLUMN);
     return b.build();
   }
-}
\ No newline at end of file
+}

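The recurring change in the file above is the new guard "selectedInUse = (inputCol.isRepeating == false) && batch.selectedInUse": when a column repeats, n collapses to 1 and only slot 0 carries data, so the selection vector must be ignored (the test below exercises this with selected = {42}). A minimal standalone sketch of the pattern, with invented array names:

public class RepeatingSelectionSketch {
  public static void main(String[] args) {
    boolean isRepeating = true;
    boolean batchSelectedInUse = true;
    int[] sel = {42};               // selection entries can point anywhere
    long[] in = new long[1024];
    long[] out = new long[1024];
    in[0] = 7;                      // the single repeated value lives at slot 0

    final int n = isRepeating ? 1 : sel.length;
    final boolean selectedInUse = !isRepeating && batchSelectedInUse;

    if (selectedInUse) {
      for (int j = 0; j < n; j++) {
        int i = sel[j];
        out[i] = in[i] + 1;
      }
    } else {
      // Repeating path: write slot 0, never slot 42.
      for (int i = 0; i < n; i++) {
        out[i] = in[i] + 1;
      }
    }
    System.out.println(out[0]);     // prints 8
  }
}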
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
index 9883fe6..a58bfb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
@@ -47,9 +47,6 @@ public class VectorUDFDateLong extends LongToStringUnaryUDF {
         date.setTime(DateWritable.daysToMillis((int) vector[i]));
         break;
 
-      case TIMESTAMP:
-        date.setTime(vector[i] / 1000000);
-        break;
       default:
         throw new Error("Unsupported input type " + inputTypes[0].name());
     }
@@ -68,7 +65,7 @@ public class VectorUDFDateLong extends LongToStringUnaryUDF {
     b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.DATETIME_FAMILY)
+            VectorExpressionDescriptor.ArgumentType.DATE)
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN);
     return b.build();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
new file mode 100644
index 0000000..cde0be4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+import java.io.UnsupportedEncodingException;
+import java.sql.Date;
+import java.text.SimpleDateFormat;
+
+public class VectorUDFDateTimestamp extends TimestampToStringUnaryUDF {
+  private static final long serialVersionUID = 1L;
+
+  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private transient Date date = new Date(0);
+
+  public VectorUDFDateTimestamp() {
+    super();
+  }
+
+  public VectorUDFDateTimestamp(int inputColumn, int outputColumn) {
+    super(inputColumn, outputColumn);
+  }
+
+  @Override
+  protected void func(BytesColumnVector outV, TimestampColumnVector inV, int i) {
+    switch (inputTypes[0]) {
+      case TIMESTAMP:
+        date.setTime(inV.getTime(i));
+        break;
+
+      default:
+        throw new Error("Unsupported input type " + inputTypes[0].name());
+    }
+    try {
+      byte[] bytes = formatter.format(date).getBytes("UTF-8");
+      outV.setRef(i, bytes, 0, bytes.length);
+    } catch (UnsupportedEncodingException e) {
+      outV.vector[i] = null;
+      outV.isNull[i] = true;
+    }
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.TIMESTAMP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}

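The new class above formats a timestamp column value to a "yyyy-MM-dd" string and hands the bytes to the output BytesColumnVector via setRef. A standalone sketch of just the conversion step (pure JDK, invented class name; ts.getTime() stands in for what TimestampColumnVector.getTime(i) supplies):

import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;

public class DateStringSketch {
  public static void main(String[] args) {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
    Date scratch = new Date(0);     // reused scratch object, as in func()

    Timestamp ts = Timestamp.valueOf("2016-04-19 12:12:55.123456789");
    scratch.setTime(ts.getTime());  // epoch millis from the timestamp column
    byte[] bytes = formatter.format(scratch).getBytes(StandardCharsets.UTF_8);
    System.out.println(new String(bytes, StandardCharsets.UTF_8)); // 2016-04-19
  }
}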
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthDate.java
new file mode 100644
index 0000000..8addb20
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthDate.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get day of month.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFDayOfMonthDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFDayOfMonthDate(int colNum, int outputColumn) {
+    super(Calendar.DAY_OF_MONTH, colNum, outputColumn);
+  }
+
+  public VectorUDFDayOfMonthDate() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthLong.java
deleted file mode 100644
index bbd734c..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthLong.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Expression to get day of month.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFDayOfMonthLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFDayOfMonthLong(int colNum, int outputColumn) {
-    super(Calendar.DAY_OF_MONTH, colNum, outputColumn);
-  }
-
-  public VectorUDFDayOfMonthLong() {
-    super();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthTimestamp.java
new file mode 100644
index 0000000..4df48ee
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfMonthTimestamp.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get day of month.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFDayOfMonthTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFDayOfMonthTimestamp(int colNum, int outputColumn) {
+    super(Calendar.DAY_OF_MONTH, colNum, outputColumn);
+  }
+
+  public VectorUDFDayOfMonthTimestamp() {
+    super();
+  }
+}

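The pattern in this commit is to split each former *Long field expression into a *Date variant (days in a LongColumnVector) and a *Timestamp variant (TimestampColumnVector), since timestamps no longer live in long vectors. The underlying field extraction is plain Calendar arithmetic; a standalone sketch of the timestamp side (invented class name):

import java.sql.Timestamp;
import java.util.Calendar;

public class DayOfMonthSketch {
  public static void main(String[] args) {
    Calendar calendar = Calendar.getInstance();
    Timestamp ts = Timestamp.valueOf("2016-04-19 12:12:55");
    calendar.setTimeInMillis(ts.getTime()); // millis from the timestamp column
    System.out.println(calendar.get(Calendar.DAY_OF_MONTH)); // prints 19
  }
}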
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourDate.java
new file mode 100644
index 0000000..0e33e25
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourDate.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Returns hour of day.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFHourDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFHourDate(int colNum, int outputColumn) {
+    super(Calendar.HOUR_OF_DAY, colNum, outputColumn);
+  }
+
+  public VectorUDFHourDate() {
+    super();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourLong.java
deleted file mode 100644
index 1d4d572..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourLong.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Returns hour of day.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFHourLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFHourLong(int colNum, int outputColumn) {
-    super(Calendar.HOUR_OF_DAY, colNum, outputColumn);
-  }
-
-  public VectorUDFHourLong() {
-    super();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourTimestamp.java
new file mode 100644
index 0000000..93961bc
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFHourTimestamp.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Returns hour of day.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFHourTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFHourTimestamp(int colNum, int outputColumn) {
+    super(Calendar.HOUR_OF_DAY, colNum, outputColumn);
+  }
+
+  public VectorUDFHourTimestamp() {
+    super();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteDate.java
new file mode 100644
index 0000000..98182ae
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteDate.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Returns minute value.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFMinuteDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFMinuteDate(int colNum, int outputColumn) {
+    super(Calendar.MINUTE, colNum, outputColumn);
+  }
+
+  public VectorUDFMinuteDate() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteLong.java
deleted file mode 100644
index 4e3eede..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteLong.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Returns minute value.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFMinuteLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFMinuteLong(int colNum, int outputColumn) {
-    super(Calendar.MINUTE, colNum, outputColumn);
-  }
-
-  public VectorUDFMinuteLong() {
-    super();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteTimestamp.java
new file mode 100644
index 0000000..7e4a262
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMinuteTimestamp.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Returns minute value.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFMinuteTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFMinuteTimestamp(int colNum, int outputColumn) {
+    super(Calendar.MINUTE, colNum, outputColumn);
+  }
+
+  public VectorUDFMinuteTimestamp() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthDate.java
new file mode 100644
index 0000000..aac8ab7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthDate.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Returns month value.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFMonthDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFMonthDate(int colNum, int outputColumn) {
+    super(Calendar.MONTH, colNum, outputColumn);
+  }
+
+  public VectorUDFMonthDate() {
+    super();
+  }
+
+  @Override
+  protected long getDateField(long days) {
+    /* january is 0 */
+    return 1 + super.getDateField(days);
+  }
+}

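The getDateField override above adds 1 because java.util.Calendar numbers months from zero, while month() is expected to return 1 for January. A two-line demonstration of that offset:

import java.util.Calendar;

public class MonthOffsetSketch {
  public static void main(String[] args) {
    Calendar c = Calendar.getInstance();
    c.clear();
    c.set(2016, Calendar.JANUARY, 1);
    System.out.println(c.get(Calendar.MONTH));     // prints 0 (months are 0-based)
    System.out.println(1 + c.get(Calendar.MONTH)); // prints 1 (what month() returns)
  }
}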
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthLong.java
deleted file mode 100644
index 58724a4..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthLong.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Returns month value.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFMonthLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFMonthLong(int colNum, int outputColumn) {
-    super(Calendar.MONTH, colNum, outputColumn);
-  }
-
-  public VectorUDFMonthLong() {
-    super();
-  }
-
-  @Override
-  protected long getTimestampField(long time) {
-    /* january is 0 */
-    return 1 + super.getTimestampField(time);
-  }
-
-  @Override
-  protected long getDateField(long days) {
-    /* january is 0 */
-    return 1 + super.getDateField(days);
-  }
-}


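The test rework below replaces nanosecond longs with Timestamp[] inputs and generates per-year boundary values around January 1st. A simplified standalone sketch of that generation pattern, as seen in getAllBoundaries() below (this version emits all three boundaries every year; the real test also clamps years Calendar normalizes outside [0, 10000) and trims the endpoints):

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;

public class BoundarySketch {
  // For each year, emit the first instant of January 1st plus the seconds
  // just before and after it.
  static Timestamp[] boundaries(int minYear, int maxYear) {
    List<Timestamp> out = new ArrayList<>();
    Calendar c = Calendar.getInstance();
    c.setTimeInMillis(0); // c.set doesn't reset millis
    for (int year = minYear; year <= maxYear; year++) {
      c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
      long exactly = c.getTimeInMillis();
      out.add(new Timestamp(exactly - 1000));
      out.add(new Timestamp(exactly));
      out.add(new Timestamp(exactly + 1000));
    }
    return out.toArray(new Timestamp[0]);
  }

  public static void main(String[] args) {
    System.out.println(boundaries(1902, 2038).length); // prints 411 (3 per year)
  }
}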
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
index 419254b..d4f1f6f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
@@ -32,10 +32,12 @@ import java.util.Random;
 import junit.framework.Assert;
 
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
 import org.apache.hadoop.hive.ql.udf.UDFHour;
@@ -56,51 +58,42 @@ import org.junit.Test;
 public class TestVectorTimestampExpressions {
   private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
-  /* copied over from VectorUDFTimestampFieldLong */
-  private TimestampWritable toTimestampWritable(long nanos) {
-    long ms = (nanos / (1000 * 1000 * 1000)) * 1000;
-    /* the milliseconds should be kept in nanos */
-    long ns = nanos % (1000*1000*1000);
-    if (ns < 0) {
-      /*
-       * The nano seconds are always positive,
-       * but the milliseconds can be negative
-       */
-      ms -= 1000;
-      ns += 1000*1000*1000;
-    }
-    Timestamp ts = new Timestamp(ms);
-    ts.setNanos((int) ns);
-    return new TimestampWritable(ts);
-  }
-
-  private long[] getAllBoundaries() {
-    List<Long> boundaries = new ArrayList<Long>(1);
+  private Timestamp[] getAllBoundaries(int minYear, int maxYear) {
+     ArrayList<Timestamp> boundaries = new ArrayList<Timestamp>(1);
     Calendar c = Calendar.getInstance();
     c.setTimeInMillis(0); // c.set doesn't reset millis
-    for (int year = 1902; year <= 2038; year++) {
+    for (int year = minYear; year <= maxYear; year++) {
       c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
-      long exactly = c.getTimeInMillis() * 1000 * 1000;
+      if (c.get(Calendar.YEAR) < 0 || c.get(Calendar.YEAR) >= 10000) {
+        continue;
+      }
+      long exactly = c.getTimeInMillis();
       /* one second before and after */
-      long before = exactly - 1000 * 1000 * 1000;
-      long after = exactly + 1000 * 1000 * 1000;
-      boundaries.add(Long.valueOf(before));
-      boundaries.add(Long.valueOf(exactly));
-      boundaries.add(Long.valueOf(after));
+      long before = exactly - 1000;
+      long after = exactly + 1000;
+      if (minYear != 0) {
+        boundaries.add(new Timestamp(before));
+      }
+      boundaries.add(new Timestamp(exactly));
+      if (year != maxYear) {
+        boundaries.add(new Timestamp(after));
+      }
     }
-    Long[] indices = boundaries.toArray(new Long[1]);
-    return ArrayUtils.toPrimitive(indices);
+    return boundaries.toArray(new Timestamp[0]);
+  }
+
+  private Timestamp[] getAllBoundaries() {
+    return getAllBoundaries(RandomTypeUtil.MIN_YEAR, RandomTypeUtil.MAX_YEAR);
   }
 
-  private VectorizedRowBatch getVectorizedRandomRowBatchLong2(int seed, int size) {
+  private VectorizedRowBatch getVectorizedRandomRowBatchTimestampLong(int seed, int size) {
     VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
-    LongColumnVector lcv = new LongColumnVector(size);
+    TimestampColumnVector tcv = new TimestampColumnVector(size);
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
-      /* all 32 bit numbers qualify & multiply up to get nano-seconds */
-      lcv.vector[i] = (long)(1000*1000*1000*rand.nextInt());
+      tcv.set(i, RandomTypeUtil.getRandTimestamp(rand));
     }
-    batch.cols[0] = lcv;
+    batch.cols[0] = tcv;
     batch.cols[1] = new LongColumnVector(size);
     batch.size = size;
     return batch;
@@ -112,7 +105,7 @@ public class TestVectorTimestampExpressions {
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
       /* all 32 bit numbers qualify & multiply up to get nano-seconds */
-      byte[] encoded = encodeTime(1000 * 1000 * 1000 * rand.nextInt());
+      byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand));
       bcv.vector[i] = encoded;
       bcv.start[i] = 0;
       bcv.length[i] = encoded.length;
@@ -125,8 +118,8 @@ public class TestVectorTimestampExpressions {
 
   private VectorizedRowBatch getVectorizedRandomRowBatch(int seed, int size, TestType testType) {
     switch (testType) {
-      case LONG2:
-        return getVectorizedRandomRowBatchLong2(seed, size);
+      case TIMESTAMP_LONG:
+        return getVectorizedRandomRowBatchTimestampLong(seed, size);
       case STRING_LONG:
         return getVectorizedRandomRowBatchStringLong(seed, size);
       default:
@@ -137,13 +130,13 @@ public class TestVectorTimestampExpressions {
   /*
    * Input array is used to fill the entire size of the vector row batch
    */
-  private VectorizedRowBatch getVectorizedRowBatchLong2(long[] inputs, int size) {
+  private VectorizedRowBatch getVectorizedRowBatchTimestampLong(Timestamp[] inputs, int size) {
     VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
-    LongColumnVector lcv = new LongColumnVector(size);
+    TimestampColumnVector tcv = new TimestampColumnVector(size);
     for (int i = 0; i < size; i++) {
-      lcv.vector[i] = inputs[i % inputs.length];
+      tcv.set(i, inputs[i % inputs.length]);
     }
-    batch.cols[0] = lcv;
+    batch.cols[0] = tcv;
     batch.cols[1] = new LongColumnVector(size);
     batch.size = size;
     return batch;
@@ -152,7 +145,7 @@ public class TestVectorTimestampExpressions {
   /*
    * Input array is used to fill the entire size of the vector row batch
    */
-  private VectorizedRowBatch getVectorizedRowBatchStringLong(long[] inputs, int size) {
+  private VectorizedRowBatch getVectorizedRowBatchStringLong(Timestamp[] inputs, int size) {
     VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
     BytesColumnVector bcv = new BytesColumnVector(size);
     for (int i = 0; i < size; i++) {
@@ -181,10 +174,10 @@ public class TestVectorTimestampExpressions {
     return batch;
   }
 
-  private VectorizedRowBatch getVectorizedRowBatch(long[] inputs, int size, TestType testType) {
+  private VectorizedRowBatch getVectorizedRowBatch(Timestamp[] inputs, int size, TestType testType) {
     switch (testType) {
-      case LONG2:
-        return getVectorizedRowBatchLong2(inputs, size);
+      case TIMESTAMP_LONG:
+        return getVectorizedRowBatchTimestampLong(inputs, size);
       case STRING_LONG:
         return getVectorizedRowBatchStringLong(inputs, size);
       default:
@@ -192,10 +185,11 @@ public class TestVectorTimestampExpressions {
     }
   }
 
-  private byte[] encodeTime(long time) {
+  private byte[] encodeTime(Timestamp timestamp) {
     ByteBuffer encoded;
+    long time = timestamp.getTime();
     try {
-      String formatted = dateFormat.format(new Date(time / (1000 * 1000)));
+      String formatted = dateFormat.format(new Date(time));
       encoded = Text.encode(formatted);
     } catch (CharacterCodingException e) {
       throw new RuntimeException(e);
@@ -203,17 +197,17 @@ public class TestVectorTimestampExpressions {
     return Arrays.copyOf(encoded.array(), encoded.limit());
   }
 
-  private long decodeTime(byte[] time) {
+  private Timestamp decodeTime(byte[] time) {
     try {
-      return dateFormat.parse(Text.decode(time)).getTime() * 1000 * 1000;
+      return new Timestamp(dateFormat.parse(Text.decode(time)).getTime());
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
   }
 
-  private long readVectorElementAt(ColumnVector col, int i) {
-    if (col instanceof LongColumnVector) {
-      return ((LongColumnVector) col).vector[i];
+  private Timestamp readVectorElementAt(ColumnVector col, int i) {
+    if (col instanceof TimestampColumnVector) {
+      return ((TimestampColumnVector) col).asScratchTimestamp(i);
     }
     if (col instanceof BytesColumnVector) {
       byte[] timeBytes = ((BytesColumnVector) col).vector[i];
@@ -223,20 +217,24 @@ public class TestVectorTimestampExpressions {
   }
 
   private enum TestType {
-    LONG2, STRING_LONG
+    TIMESTAMP_LONG, STRING_LONG
   }
 
-  private void compareToUDFYearLong(long t, int y) {
+  private void compareToUDFYearLong(Timestamp t, int y) {
     UDFYear udf = new UDFYear();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
+    if (res.get() != y) {
+      System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(),
+          tsw.getTimestamp().getTime()/1000);
+    }
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFYear(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf = null;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFYearLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFYearTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFYearString(0, 1);
@@ -251,7 +249,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFYearLong(t, (int) y);
       } else {
@@ -261,7 +259,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFYear(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -269,7 +267,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFYear(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFYear(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -277,8 +275,17 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFYear(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    verifyUDFYear(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFYear(batch, testType);
+
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
     verifyUDFYear(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
@@ -293,8 +300,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFYearLong() {
-    testVectorUDFYear(TestType.LONG2);
+  public void testVectorUDFYearTimestamp() {
+    testVectorUDFYear(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -309,17 +316,17 @@ public class TestVectorTimestampExpressions {
     Assert.assertEquals(true, lcv.isNull[0]);
   }
 
-  private void compareToUDFDayOfMonthLong(long t, int y) {
+  private void compareToUDFDayOfMonthLong(Timestamp t, int y) {
     UDFDayOfMonth udf = new UDFDayOfMonth();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFDayOfMonth(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf = null;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFDayOfMonthLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFDayOfMonthTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFDayOfMonthString(0, 1);
@@ -334,7 +341,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFDayOfMonthLong(t, (int) y);
       } else {
@@ -344,7 +351,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFDayOfMonth(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -352,7 +359,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFDayOfMonth(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFDayOfMonth(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -360,13 +367,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFDayOfMonth(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFDayOfMonth(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFDayOfMonth(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFDayOfMonth(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFDayOfMonth(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFDayOfMonth(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -376,8 +392,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFDayOfMonthLong() {
-    testVectorUDFDayOfMonth(TestType.LONG2);
+  public void testVectorUDFDayOfMonthTimestamp() {
+    testVectorUDFDayOfMonth(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -385,17 +401,17 @@ public class TestVectorTimestampExpressions {
     testVectorUDFDayOfMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFHourLong(long t, int y) {
+  private void compareToUDFHourLong(Timestamp t, int y) {
     UDFHour udf = new UDFHour();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFHour(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf = null;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFHourLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFHourTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFHourString(0, 1);
@@ -410,7 +426,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFHourLong(t, (int) y);
       } else {
@@ -420,7 +436,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFHour(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -428,7 +444,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFHour(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFHour(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -436,13 +452,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFHour(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFHour(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFHour(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFHour(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFHour(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFHour(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -452,8 +477,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFHourLong() {
-    testVectorUDFHour(TestType.LONG2);
+  public void testVectorUDFHourTimestamp() {
+    testVectorUDFHour(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -461,17 +486,17 @@ public class TestVectorTimestampExpressions {
     testVectorUDFHour(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMinuteLong(long t, int y) {
+  private void compareToUDFMinuteLong(Timestamp t, int y) {
     UDFMinute udf = new UDFMinute();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFMinute(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf = null;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFMinuteLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFMinuteTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFMinuteString(0, 1);
@@ -486,7 +511,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFMinuteLong(t, (int) y);
       } else {
@@ -496,7 +521,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFMinute(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -504,7 +529,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFMinute(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFMinute(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -512,13 +537,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFMinute(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFMinute(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFMinute(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFMinute(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFMinute(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFMinute(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -529,7 +563,7 @@ public class TestVectorTimestampExpressions {
 
   @Test
   public void testVectorUDFMinuteLong() {
-    testVectorUDFMinute(TestType.LONG2);
+    testVectorUDFMinute(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -537,17 +571,17 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMinute(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMonthLong(long t, int y) {
+  private void compareToUDFMonthLong(Timestamp t, int y) {
     UDFMonth udf = new UDFMonth();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFMonth(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFMonthLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFMonthTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFMonthString(0, 1);
@@ -562,7 +596,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFMonthLong(t, (int) y);
       } else {
@@ -572,7 +606,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFMonth(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -580,7 +614,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFMonth(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFMonth(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -588,13 +622,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFMonth(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFMonth(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFMonth(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFMonth(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFMonth(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFMonth(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -604,8 +647,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFMonthLong() {
-    testVectorUDFMonth(TestType.LONG2);
+  public void testVectorUDFMonthTimestamp() {
+    testVectorUDFMonth(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -613,17 +656,17 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFSecondLong(long t, int y) {
+  private void compareToUDFSecondLong(Timestamp t, int y) {
     UDFSecond udf = new UDFSecond();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFSecond(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFSecondLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFSecondTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFSecondString(0, 1);
@@ -638,7 +681,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[in].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFSecondLong(t, (int) y);
       } else {
@@ -648,7 +691,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFSecond(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -656,7 +699,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFSecond(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFSecond(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -664,13 +707,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFSecond(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFSecond(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFSecond(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFSecond(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFSecond(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFSecond(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -681,7 +733,7 @@ public class TestVectorTimestampExpressions {
 
   @Test
   public void testVectorUDFSecondLong() {
-    testVectorUDFSecond(TestType.LONG2);
+    testVectorUDFSecond(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -689,31 +741,18 @@ public class TestVectorTimestampExpressions {
     testVectorUDFSecond(TestType.STRING_LONG);
   }
 
-  private LongWritable getLongWritable(TimestampWritable i) {
-    LongWritable result = new LongWritable();
-    if (i == null) {
-      return null;
-    } else {
-      result.set(i.getSeconds());
-      return result;
+  private void compareToUDFUnixTimeStampLong(Timestamp ts, long y) {
+    long seconds = ts.getTime() / 1000;
+    if (seconds != y) {
+      Assert.fail(String.format("%d vs %d for %s", seconds, y, ts));
     }
   }
 
-  private void compareToUDFUnixTimeStampLong(long t, long y) {
-    TimestampWritable tsw = toTimestampWritable(t);
-    LongWritable res = getLongWritable(tsw);
-    if(res.get() != y) {
-      System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t,
-          tsw.getTimestamp().getTime()/1000);
-    }
-
-    Assert.assertEquals(res.get(), y);
-  }
-
   private void verifyUDFUnixTimeStamp(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFUnixTimeStampLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFUnixTimeStampTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFUnixTimeStampString(0, 1);
@@ -728,7 +767,7 @@ public class TestVectorTimestampExpressions {
         if (!batch.cols[out].noNulls) {
           Assert.assertEquals(batch.cols[out].isNull[i], batch.cols[in].isNull[i]);
         }
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFUnixTimeStampLong(t, y);
       } else {
@@ -738,7 +777,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFUnixTimeStamp(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -746,7 +785,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFUnixTimeStamp(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFUnixTimeStamp(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -754,13 +793,22 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFUnixTimeStamp(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
     verifyUDFUnixTimeStamp(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
     verifyUDFUnixTimeStamp(batch, testType);
 
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
+    verifyUDFUnixTimeStamp(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFUnixTimeStamp(batch, testType);
+
     batch = getVectorizedRandomRowBatch(200, VectorizedRowBatch.DEFAULT_SIZE, testType);
     verifyUDFUnixTimeStamp(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -770,8 +818,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFUnixTimeStampLong() {
-    testVectorUDFUnixTimeStamp(TestType.LONG2);
+  public void testVectorUDFUnixTimeStampTimestamp() {
+    testVectorUDFUnixTimeStamp(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -779,17 +827,17 @@ public class TestVectorTimestampExpressions {
     testVectorUDFUnixTimeStamp(TestType.STRING_LONG);
   }
 
-  private void compareToUDFWeekOfYearLong(long t, int y) {
+  private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
     UDFWeekOfYear udf = new UDFWeekOfYear();
-    TimestampWritable tsw = toTimestampWritable(t);
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
   private void verifyUDFWeekOfYear(VectorizedRowBatch batch, TestType testType) {
     VectorExpression udf;
-    if (testType == TestType.LONG2) {
-      udf = new VectorUDFWeekOfYearLong(0, 1);
+    if (testType == TestType.TIMESTAMP_LONG) {
+      udf = new VectorUDFWeekOfYearTimestamp(0, 1);
       udf.setInputTypes(VectorExpression.Type.TIMESTAMP);
     } else {
       udf = new VectorUDFWeekOfYearString(0, 1);
@@ -801,7 +849,7 @@ public class TestVectorTimestampExpressions {
 
     for (int i = 0; i < batch.size; i++) {
       if (batch.cols[in].noNulls || !batch.cols[in].isNull[i]) {
-        long t = readVectorElementAt(batch.cols[in], i);
+        Timestamp t = readVectorElementAt(batch.cols[in], i);
         long y = ((LongColumnVector) batch.cols[out]).vector[i];
         compareToUDFWeekOfYearLong(t, (int) y);
       } else {
@@ -811,7 +859,7 @@ public class TestVectorTimestampExpressions {
   }
 
   private void testVectorUDFWeekOfYear(TestType testType) {
-    VectorizedRowBatch batch = getVectorizedRowBatch(new long[] {0},
+    VectorizedRowBatch batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)},
             VectorizedRowBatch.DEFAULT_SIZE, testType);
     Assert.assertTrue(((LongColumnVector) batch.cols[1]).noNulls);
     Assert.assertFalse(((LongColumnVector) batch.cols[1]).isRepeating);
@@ -819,7 +867,7 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
     verifyUDFWeekOfYear(batch, testType);
 
-    long[] boundaries = getAllBoundaries();
+    Timestamp[] boundaries = getAllBoundaries();
     batch = getVectorizedRowBatch(boundaries, boundaries.length, testType);
     verifyUDFWeekOfYear(batch, testType);
     TestVectorizedRowBatch.addRandomNulls(batch.cols[0]);
@@ -827,8 +875,17 @@ public class TestVectorTimestampExpressions {
     TestVectorizedRowBatch.addRandomNulls(batch.cols[1]);
     verifyUDFWeekOfYear(batch, testType);
 
-    batch = getVectorizedRowBatch(new long[] {0}, 1, testType);
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
+    batch.cols[0].isRepeating = true;
+    verifyUDFWeekOfYear(batch, testType);
+    batch.cols[0].noNulls = false;
+    batch.cols[0].isNull[0] = true;
+    verifyUDFWeekOfYear(batch, testType);
+
+    batch = getVectorizedRowBatch(new Timestamp[] {new Timestamp(0)}, 1, testType);
     batch.cols[0].isRepeating = true;
+    batch.selectedInUse = true;
+    batch.selected = new int[] {42};
     verifyUDFWeekOfYear(batch, testType);
     batch.cols[0].noNulls = false;
     batch.cols[0].isNull[0] = true;
@@ -843,8 +900,8 @@ public class TestVectorTimestampExpressions {
   }
 
   @Test
-  public void testVectorUDFWeekOfYearLong() {
-    testVectorUDFWeekOfYear(TestType.LONG2);
+  public void testVectorUDFWeekOfYearTimestamp() {
+    testVectorUDFWeekOfYear(TestType.TIMESTAMP_LONG);
   }
 
   @Test
@@ -854,12 +911,13 @@ public class TestVectorTimestampExpressions {
 
   public static void main(String[] args) {
     TestVectorTimestampExpressions self = new TestVectorTimestampExpressions();
-    self.testVectorUDFYearLong();
-    self.testVectorUDFMonthLong();
-    self.testVectorUDFDayOfMonthLong();
-    self.testVectorUDFHourLong();
-    self.testVectorUDFWeekOfYearLong();
-    self.testVectorUDFUnixTimeStampLong();
+    self.testVectorUDFYearTimestamp();
+    self.testVectorUDFMonthTimestamp();
+    self.testVectorUDFDayOfMonthTimestamp();
+    self.testVectorUDFHourTimestamp();
+    self.testVectorUDFWeekOfYearTimestamp();
+    self.testVectorUDFUnixTimeStampTimestamp();
+
     self.testVectorUDFYearString();
     self.testVectorUDFMonthString();
     self.testVectorUDFDayOfMonthString();

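The repeating-plus-selected cases added above deserve a note: when a column is marked isRepeating, a vectorized expression reads only element 0, so a selected index such as 42 that points well past the one populated slot must still come back with the right answer. A minimal sketch of that setup (illustration only, not part of the patch; it assumes the TimestampColumnVector API introduced elsewhere in this change):

    // Hypothetical standalone setup mirroring the new test cases.
    VectorizedRowBatch batch = new VectorizedRowBatch(2, VectorizedRowBatch.DEFAULT_SIZE);
    TimestampColumnVector tcv = new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    tcv.set(0, new Timestamp(0));      // only slot 0 is populated
    tcv.isRepeating = true;            // every logical row must read slot 0
    batch.cols[0] = tcv;
    batch.cols[1] = new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    batch.size = 1;
    batch.selectedInUse = true;
    batch.selected = new int[] {42};   // far past slot 0; a repeating column must ignore it
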
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 0e23680..1e41fce 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -23,19 +23,29 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.UnsupportedEncodingException;
+import java.math.BigDecimal;
+import java.math.MathContext;
+import java.math.RoundingMode;
+import java.sql.Timestamp;
 import java.util.Arrays;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
 
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.junit.Test;
 
 /**
@@ -43,9 +53,6 @@ import org.junit.Test;
  */
 public class TestVectorTypeCasts {
 
-  // Number of nanoseconds in one second
-  private static final long NANOS_PER_SECOND = 1000000000;
-
   @Test
   public void testVectorCastLongToDouble() {
     VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInDoubleOut();
@@ -79,13 +86,13 @@ public class TestVectorTypeCasts {
 
   @Test
   public void testCastDoubleToTimestamp() {
-    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchDoubleInLongOut();
-    LongColumnVector resultV = (LongColumnVector) b.cols[1];
+    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchDoubleInTimestampOut();
+    TimestampColumnVector resultV = (TimestampColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new CastDoubleToTimestampViaDoubleToLong(0, 1);
+    VectorExpression expr = new CastDoubleToTimestamp(0, 1);
     expr.evaluate(b);
-    Assert.assertEquals(0, resultV.vector[3]);
-    Assert.assertEquals((long) (0.5d * NANOS_PER_SECOND), resultV.vector[4]);
+    Assert.assertEquals(0.0, TimestampWritable.getDouble(resultV.asScratchTimestamp(3)));
+    Assert.assertEquals(0.5d, TimestampWritable.getDouble(resultV.asScratchTimestamp(4)));
   }
 
   @Test
@@ -103,39 +110,51 @@ public class TestVectorTypeCasts {
 
   @Test
   public void testCastLongToTimestamp() {
-    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
-    LongColumnVector resultV = (LongColumnVector) b.cols[1];
+    long[] longValues = new long[500];
+    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInTimestampOut(longValues);
+    TimestampColumnVector resultV = (TimestampColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new CastLongToTimestampViaLongToLong(0, 1);
+    VectorExpression expr = new CastLongToTimestamp(0, 1);
     expr.evaluate(b);
-    Assert.assertEquals(-2 * NANOS_PER_SECOND, resultV.vector[0]);
-    Assert.assertEquals(2 * NANOS_PER_SECOND, resultV.vector[1]);
+    for (int i = 0; i < longValues.length; i++) {
+      Timestamp timestamp = resultV.asScratchTimestamp(i);
+      long actual = TimestampWritable.getLong(timestamp);
+      assertEquals(longValues[i], actual);
+    }
   }
 
   @Test
   public void testCastTimestampToLong() {
-    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInLongOut();
-    LongColumnVector inV = (LongColumnVector) b.cols[0];
-    inV.vector[0] = NANOS_PER_SECOND;  // Make one entry produce interesting result
-      // (1 sec after epoch).
-
+    long[] longValues = new long[500];
+    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchTimestampInLongOut(longValues);
+    TimestampColumnVector inV = (TimestampColumnVector) b.cols[0];
     LongColumnVector resultV = (LongColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new CastTimestampToLongViaLongToLong(0, 1);
+    VectorExpression expr = new CastTimestampToLong(0, 1);
     expr.evaluate(b);
-    Assert.assertEquals(1, resultV.vector[0]);
+    for (int i = 0; i < longValues.length; i++) {
+      long actual = resultV.vector[i];
+      long timestampLong = inV.getTimestampAsLong(i);
+      assertEquals(timestampLong, actual);
+    }
   }
 
   @Test
   public void testCastTimestampToDouble() {
-    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInDoubleOut();
-    LongColumnVector inV = (LongColumnVector) b.cols[0];
+    double[] doubleValues = new double[500];
+    VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchTimestampInDoubleOut(doubleValues);
+    TimestampColumnVector inV = (TimestampColumnVector) b.cols[0];
     DoubleColumnVector resultV = (DoubleColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new CastTimestampToDoubleViaLongToDouble(0, 1);
+    VectorExpression expr = new CastTimestampToDouble(0, 1);
     expr.evaluate(b);
-    Assert.assertEquals(-1E-9D , resultV.vector[1]);
-    Assert.assertEquals(1E-9D, resultV.vector[3]);
+    for (int i = 0; i < doubleValues.length; i++) {
+      double actual = resultV.vector[i];
+      double doubleValue = TimestampWritable.getDouble(inV.asScratchTimestamp(i));
+      assertEquals(doubleValue, actual, 0.000000001);
+    }
   }
 
   public byte[] toBytes(String s) {
@@ -356,13 +375,19 @@ public class TestVectorTypeCasts {
 
   @Test
   public void testCastDecimalToTimestamp() {
-    VectorizedRowBatch b = getBatchDecimalLong2();
+    double[] doubleValues = new double[500];
+    VectorizedRowBatch b = getBatchDecimalTimestamp(doubleValues);
     VectorExpression expr = new CastDecimalToTimestamp(0, 1);
     expr.evaluate(b);
-    LongColumnVector r = (LongColumnVector) b.cols[1];
-    assertEquals(1111111111L, r.vector[0]);
-    assertEquals(-2222222222L, r.vector[1]);
-    assertEquals(31536000999999999L, r.vector[2]);
+    TimestampColumnVector r = (TimestampColumnVector) b.cols[1];
+    for (int i = 0; i < doubleValues.length; i++) {
+      Timestamp timestamp = r.asScratchTimestamp(i);
+      double asDouble = TimestampWritable.getDouble(timestamp);
+      double expectedDouble = doubleValues[i];
+      assertEquals(expectedDouble, asDouble, 0.0);
+    }
   }
 
   private VectorizedRowBatch getBatchDecimalLong2() {
@@ -381,6 +406,28 @@ public class TestVectorTypeCasts {
     return b;
   }
 
+  private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
+    VectorizedRowBatch b = new VectorizedRowBatch(2);
+    DecimalColumnVector dv;
+    b.cols[0] = dv = new DecimalColumnVector(doubleValues.length, HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
+    b.cols[1] = new TimestampColumnVector(doubleValues.length);
+    dv.noNulls = true;
+    Random r = new Random(94830);
+    for (int i = 0; i < doubleValues.length; i++) {
+      long millis = RandomTypeUtil.randomMillis(r);
+      Timestamp ts = new Timestamp(millis);
+      int nanos = RandomTypeUtil.randomNanos(r);
+      ts.setNanos(nanos);
+      TimestampWritable tsw = new TimestampWritable(ts);
+      double asDouble = tsw.getDouble();
+      doubleValues[i] = asDouble;
+      HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal(asDouble));
+      dv.set(i, hiveDecimal);
+    }
+    b.size = doubleValues.length;
+    return b;
+  }
+
   @Test
   public void testCastLongToDecimal() {
     VectorizedRowBatch b = getBatchLongDecimal();
@@ -403,6 +450,47 @@ public class TestVectorTypeCasts {
     return b;
   }
 
+
+  public static final long NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
+  public static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
+  public static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
+
+  private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalValues) {
+    Random r = new Random(994);
+    VectorizedRowBatch b = new VectorizedRowBatch(2);
+    TimestampColumnVector tcv;
+    b.cols[0] = tcv = new TimestampColumnVector(hiveDecimalValues.length);
+    b.cols[1] = new DecimalColumnVector(hiveDecimalValues.length, HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
+    for (int i = 0; i < hiveDecimalValues.length; i++) {
+      int optionalNanos = 0;
+      switch (r.nextInt(4)) {
+      case 0:
+        // No nanos.
+        break;
+      case 1:
+        optionalNanos = r.nextInt((int) NANOSECONDS_PER_SECOND);
+        break;
+      case 2:
+        // Limit to milliseconds only...
+        optionalNanos = r.nextInt((int) MILLISECONDS_PER_SECOND) * (int) NANOSECONDS_PER_MILLISECOND;
+        break;
+      case 3:
+        // Limit to below milliseconds only...
+        optionalNanos = r.nextInt((int) NANOSECONDS_PER_MILLISECOND);
+        break;
+      }
+      long millis = RandomTypeUtil.randomMillis(r);
+      Timestamp ts = new Timestamp(millis);
+      ts.setNanos(optionalNanos);
+      TimestampWritable tsw = new TimestampWritable(ts);
+      hiveDecimalValues[i] = tsw.getHiveDecimal();
+
+      tcv.set(i, ts);
+    }
+    b.size = hiveDecimalValues.length;
+    return b;
+  }
+
   @Test
   public void testCastDoubleToDecimal() {
     VectorizedRowBatch b = getBatchDoubleDecimal();
@@ -410,9 +498,18 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
 
-    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.0")));
-    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.0")));
-    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("99999999999999")));
+    assertEquals(HiveDecimal.create("0.0"), r.vector[0].getHiveDecimal());
+    assertEquals(HiveDecimal.create("-1.0"), r.vector[1].getHiveDecimal());
+    assertEquals(HiveDecimal.create("99999999999999"), r.vector[2].getHiveDecimal());
   }
 
   private VectorizedRowBatch getBatchDoubleDecimal() {
@@ -466,25 +563,37 @@ public class TestVectorTypeCasts {
 
-    // The input timestamps are stored as long values
-    // measured in nanoseconds from the epoch.
+    // The input timestamps are random values held in a TimestampColumnVector;
+    // hiveDecimalValues carries the expected decimal form of each one.
-    VectorizedRowBatch b = getBatchLongDecimal();
+    HiveDecimal[] hiveDecimalValues = new HiveDecimal[500];
+    VectorizedRowBatch b = getBatchTimestampDecimal(hiveDecimalValues);
     VectorExpression expr = new CastTimestampToDecimal(0, 1);
-    LongColumnVector inL = (LongColumnVector) b.cols[0];
-    inL.vector[1] = -1990000000L;
+    TimestampColumnVector inT = (TimestampColumnVector) b.cols[0];
     expr.evaluate(b);
     DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.00")));
-    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.99")));
-    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("100000.00")));
+    for (int i = 0; i < hiveDecimalValues.length; i++) {
+      HiveDecimal hiveDecimal = r.vector[i].getHiveDecimal();
+      HiveDecimal expectedHiveDecimal = hiveDecimalValues[i];
+      assertEquals(expectedHiveDecimal, hiveDecimal);
+    }
 
     // Try again with a value that won't fit in 5 digits, to make
     // sure that NULL is produced.
-    b = getBatchLongDecimalPrec5Scale2();
+    b.cols[1] = r = new DecimalColumnVector(hiveDecimalValues.length, 5, 2);
     expr.evaluate(b);
     r = (DecimalColumnVector) b.cols[1];
-    assertFalse(r.noNulls);
-    assertFalse(r.isNull[0]);
-    assertFalse(r.isNull[1]);
-    assertTrue(r.isNull[2]);
+    for (int i = 0; i < hiveDecimalValues.length; i++) {
+      HiveDecimal hiveDecimal = r.vector[i].getHiveDecimal();
+      HiveDecimal expectedHiveDecimal = hiveDecimalValues[i];
+      if (HiveDecimal.enforcePrecisionScale(expectedHiveDecimal, 5, 2) == null) {
+        assertTrue(r.isNull[i]);
+      } else {
+        assertFalse(r.isNull[i]);
+        assertEquals(expectedHiveDecimal, hiveDecimal);
+      }
+    }
   }
 
   /* This batch has output decimal column precision 5 and scale 2.
@@ -503,30 +612,6 @@ public class TestVectorTypeCasts {
     return b;
   }
 
-  /*
-  @Test
-  public void testCastDecimalToDecimal() {
-
-    // test casting from one precision and scale to another.
-    VectorizedRowBatch b = getBatchDecimalDecimal();
-    VectorExpression expr = new CastDecimalToDecimal(0, 1);
-    expr.evaluate(b);
-    DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
-    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("10.00", (short) 2)));
-    assertFalse(r.noNulls);
-    assertTrue(r.isNull[1]);
-
-    // test an increase in precision/scale
-    b = getBatchDecimalDecimal();
-    expr = new CastDecimalToDecimal(1, 0);
-    expr.evaluate(b);
-    r = (DecimalColumnVector) b.cols[0];
-    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("100.01", (short) 4)));
-    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-200.02", (short) 4)));
-    assertTrue(r.noNulls);
-  }
-  */
-
   private VectorizedRowBatch getBatchDecimalDecimal() {
     VectorizedRowBatch b = new VectorizedRowBatch(2);
 

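One property carries the whole second loop of testCastTimestampToDecimal above: HiveDecimal.enforcePrecisionScale returns null when a value cannot be represented at the target precision and scale, which is exactly when the vectorized cast is expected to null out the row. A small hedged example (the literal is illustrative):

    HiveDecimal wide = HiveDecimal.create("123456.78");   // needs 6 integer digits
    HiveDecimal narrowed = HiveDecimal.enforcePrecisionScale(wide, 5, 2);
    // narrowed == null: precision 5 / scale 2 leaves room for only 3 integer
    // digits, so a DecimalColumnVector with precision 5, scale 2 must set
    // isNull for this row.
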
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
index bbda9a3..98849c3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
@@ -101,16 +101,16 @@ public class FakeVectorRowBatchFromObjectIterables extends FakeVectorRowBatchBas
           }
         };
       } else if (types[i].equalsIgnoreCase("timestamp")) {
-        batch.cols[i] = new LongColumnVector(batchSize);
+        batch.cols[i] = new TimestampColumnVector(batchSize);
         columnAssign[i] = new ColumnVectorAssign() {
           @Override
           public void assign(
               ColumnVector columnVector,
               int row,
               Object value) {
-            LongColumnVector lcv = (LongColumnVector) columnVector;
+            TimestampColumnVector lcv = (TimestampColumnVector) columnVector;
             Timestamp t = (Timestamp) value;
-            lcv.vector[row] = TimestampUtils.getTimeNanoSec(t);
+            lcv.set(row, t);
           }
         };
 

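This assignment change is HIVE-9862 in miniature: instead of collapsing a Timestamp into one long of epoch nanoseconds, the fake batch now stores it in a TimestampColumnVector, which keeps milliseconds and nanoseconds without overflow. A short round-trip sketch (assuming only the set, getTime, and asScratchTimestamp accessors that appear in the hunks of this patch):

    Timestamp t = Timestamp.valueOf("2016-04-19 12:12:55.123456789");
    TimestampColumnVector tcv = new TimestampColumnVector(1);
    tcv.set(0, t);
    // Both components survive:
    //   tcv.getTime(0)                       == t.getTime()
    //   tcv.asScratchTimestamp(0).getNanos() == t.getNanos()
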
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
index 5d79f9c..84717b1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
@@ -18,13 +18,15 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.util;
 
+import java.sql.Timestamp;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -77,6 +79,37 @@ public class VectorizedRowGroupGenUtil {
     return lcv;
   }
 
+  public static TimestampColumnVector generateTimestampColumnVector(
+      boolean nulls, boolean repeating, int size, Random rand, Timestamp[] timestampValues) {
+    TimestampColumnVector tcv = new TimestampColumnVector(size);
+
+    tcv.noNulls = !nulls;
+    tcv.isRepeating = repeating;
+
+    Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand);
+
+    int nullFrequency = generateNullFrequency(rand);
+
+    for(int i = 0; i < size; i++) {
+      if(nulls && (repeating || i % nullFrequency == 0)) {
+        tcv.isNull[i] = true;
+        tcv.setNullValue(i);
+        timestampValues[i] = null;
+      } else {
+        tcv.isNull[i] = false;
+        if (!repeating) {
+          Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
+          tcv.set(i, randomTimestamp);
+          timestampValues[i] = randomTimestamp;
+        } else {
+          tcv.set(i, repeatingTimestamp);
+          timestampValues[i] = repeatingTimestamp;
+        }
+      }
+    }
+    return tcv;
+  }
+
   public static DoubleColumnVector generateDoubleColumnVector(boolean nulls,
       boolean repeating, int size, Random rand) {
     DoubleColumnVector dcv = new DoubleColumnVector(size);

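A possible call site for the new generator (seed and size are arbitrary): the timestampValues array is filled in lock-step with the vector, so a test can later check vector contents against plain Timestamp objects.

    Random rand = new Random(12345);
    Timestamp[] values = new Timestamp[1024];
    TimestampColumnVector tcv =
        VectorizedRowGroupGenUtil.generateTimestampColumnVector(
            true, false, values.length, rand, values);  // nulls on, not repeating
    // values[i] is null exactly where tcv.isNull[i] is set; otherwise they agree.
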
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index a345884..c0d912d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
@@ -1779,7 +1780,7 @@ public class TestInputOutputFormat {
     BytesColumnVector stringColumn = (BytesColumnVector) value.cols[7];
     DecimalColumnVector decimalColumn = (DecimalColumnVector) value.cols[8];
     LongColumnVector dateColumn = (LongColumnVector) value.cols[9];
-    LongColumnVector timestampColumn = (LongColumnVector) value.cols[10];
+    TimestampColumnVector timestampColumn = (TimestampColumnVector) value.cols[10];
     for(int i=0; i < 100; i++) {
       assertEquals("checking boolean " + i, i % 2 == 0 ? 1 : 0,
           booleanColumn.vector[i]);
@@ -1800,8 +1801,8 @@ public class TestInputOutputFormat {
       assertEquals("checking date " + i, i, dateColumn.vector[i]);
       long millis = (long) i * MILLIS_IN_DAY;
       millis -= LOCAL_TIMEZONE.getOffset(millis);
-      assertEquals("checking timestamp " + i, millis * 1000000L,
-          timestampColumn.vector[i]);
+      assertEquals("checking timestamp " + i, millis,
+          timestampColumn.getTime(i));
     }
     assertEquals(false, reader.next(key, value));
   }

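The assertion rewrite above removes a unit conversion rather than changing the expected instant: the old LongColumnVector held epoch nanoseconds, so the test scaled by 1000000L, while TimestampColumnVector.getTime(i) answers in milliseconds directly. For one day past the epoch:

    long millis = 86400000L;  // 24 * 60 * 60 * 1000
    // old expectation: timestampColumn.vector[i]  == millis * 1000000L   (nanoseconds)
    // new expectation: timestampColumn.getTime(i) == millis              (milliseconds)
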
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index 137c39f..e526913 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -525,7 +525,10 @@ public class TestOrcFile {
     int idx = 0;
     while (rows.hasNext()) {
       Object row = rows.next(null);
-      assertEquals(tslist.get(idx++).getNanos(), ((TimestampWritable) row).getNanos());
+      Timestamp tlistTimestamp = tslist.get(idx++);
+      if (tlistTimestamp.getNanos() != ((TimestampWritable) row).getNanos()) {
+        assertTrue(false);
+      }
     }
     assertEquals(1, OrcUtils.getFlattenedColumnsCount(inspector));
     boolean[] expected = new boolean[] {false};

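The if/assertTrue(false) form above follows the debugging style used throughout this patch; an equivalent, more conventional phrasing (same behavior, the message text is only an illustration) would be:

    Timestamp expected = tslist.get(idx++);
    assertEquals("timestamp nanos", expected.getNanos(),
        ((TimestampWritable) row).getNanos());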

[08/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
new file mode 100644
index 0000000..b3e1fae
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFStdSampTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "stddev_samp",
+    value = "_FUNC_(x) - Returns the sample standard deviation of a set of numbers (vectorized, double)")
+public class VectorUDAFStdSampTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+      * Value is explicitly (re)initialized in reset() (despite the init() below...)
+      */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFStdSampTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFStdSampTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+  private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        fname.add("variance");
+
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void  iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+          i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void  iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+  @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3+
+        model.primitive1(),
+        model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+

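Every iterate* method above applies the same one-pass recurrence for the sum of squared deviations: with S the running sum and n the count after adding x, t = n*x - S, and t*t / (n*(n-1)) equals (x minus the old mean) times (x minus the new mean), the classic Welford-style update. A minimal standalone check of that identity (not part of the patch):

    double[] xs = {1.5, 2.0, 2.5, 10.0};  // arbitrary sample
    double sum = 0, m2 = 0;
    long count = 0;
    for (double value : xs) {
      sum += value;
      count += 1;
      if (count > 1) {
        double t = count * value - sum;   // count * (value - new running mean)
        m2 += (t * t) / ((double) count * (count - 1));
      }
    }
    // m2 now equals the two-pass sum of (x - mean)^2; divide by count for
    // var_pop, by (count - 1) for var_samp; stddev_samp is the square root.
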
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
new file mode 100644
index 0000000..970ec22
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFVarPopTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "variance, var_pop",
+    value = "_FUNC_(x) - Returns the variance of a set of numbers (vectorized, timestamp)")
+public class VectorUDAFVarPopTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+      * Value is explicitly (re)initialized in reset() (despite the init() below...)
+      */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFVarPopTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFVarPopTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+  private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        fname.add("variance");
+
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
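+      // TimestampColumnVector.getDouble(i) reads the timestamp as a double
+      // (seconds since the epoch, with fractional nanoseconds), so the
+      // variance arithmetic below operates on plain doubles.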
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
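+        // Incremental update of the running sum of squared deviations:
+        // with t = count*value - sum, add t*t / (count*(count-1)).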
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i<batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
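+      // One possible closed form (untested sketch, via the pairwise-merge rule
+      // of Chan et al.): for an existing aggregate with count c > 0 and mean m,
+      //   variance += (value - m)*(value - m) * c * batchSize / (c + batchSize)
+      // with count += batchSize and sum += batchSize*value.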
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i<batchSize; ++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
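+    // Fixed buffer size: one object header, three 8-byte fields (sum, count,
+    // variance) and one boolean flag (isNull), rounded up to the JVM's
+    // memory-alignment boundary.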
+    @Override
+    public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3 +
+        model.primitive1(),
+        model.memoryAlign());
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+    public VectorExpression getInputExpression() {
+      return inputExpression;
+    }
+
+    public void setInputExpression(VectorExpression inputExpression) {
+      this.inputExpression = inputExpression;
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
new file mode 100644
index 0000000..9af1a28
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * VectorUDAFVarSampTimestamp. Vectorized implementation for VARIANCE aggregates.
+ */
+@Description(name = "var_samp",
+    value = "_FUNC_(x) - Returns the sample variance of a set of numbers (vectorized, timestamp)")
+public class VectorUDAFVarSampTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+       * Value is explicitly (re)initialized in reset() (despite the init() below).
+       */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFVarSampTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFVarSampTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+    private void initPartialResultInspector() {
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("variance");
+
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i<batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
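+      // Dispatch on isRepeating, selectedInUse and noNulls so each specialized
+      // inner loop stays free of per-row branching.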
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i=0; i<batchSize; ++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
+    @Override
+    public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3 +
+        model.primitive1(),
+        model.memoryAlign());
+    }
+
+    @Override
+    public void init(AggregationDesc desc) throws HiveException {
+      // No-op
+    }
+
+    public VectorExpression getInputExpression() {
+      return inputExpression;
+    }
+
+    public void setInputExpression(VectorExpression inputExpression) {
+      this.inputExpression = inputExpression;
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
index d3a0f9f..20cfb89 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
@@ -297,23 +297,14 @@ public class VectorUDFAdaptor extends VectorExpression {
         lv.vector[i] = ((WritableByteObjectInspector) outputOI).get(value);
       }
     } else if (outputOI instanceof WritableTimestampObjectInspector) {
-      LongColumnVector lv = (LongColumnVector) colVec;
+      TimestampColumnVector tv = (TimestampColumnVector) colVec;
       Timestamp ts;
       if (value instanceof Timestamp) {
         ts = (Timestamp) value;
       } else {
         ts = ((WritableTimestampObjectInspector) outputOI).getPrimitiveJavaObject(value);
       }
-      /* Calculate the number of nanoseconds since the epoch as a long integer. By convention
-       * that is how Timestamp values are operated on in a vector.
-       */
-      long l = ts.getTime() * 1000000  // Shift the milliseconds value over by 6 digits
-                                       // to scale for nanosecond precision.
-                                       // The milliseconds digits will by convention be all 0s.
-            + ts.getNanos() % 1000000; // Add on the remaining nanos.
-                                       // The % 1000000 operation removes the ms values
-                                       // so that the milliseconds are not counted twice.
-      lv.vector[i] = l;
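+      // TimestampColumnVector keeps the millisecond and nanosecond parts of
+      // each value separately, so a single set() call suffices here.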
+      tv.set(i, ts);
     } else if (outputOI instanceof WritableDateObjectInspector) {
       LongColumnVector lv = (LongColumnVector) colVec;
       Date ts;

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
index 96df394..146b3f0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
@@ -997,6 +998,7 @@ public class TreeReaderFactory {
     private final TimeZone readerTimeZone;
     private TimeZone writerTimeZone;
     private boolean hasSameTZRules;
+    private TimestampWritable scratchTimestampWritable;
 
     TimestampTreeReader(int columnId, boolean skipCorrupt) throws IOException {
       this(columnId, null, null, null, null, skipCorrupt);
@@ -1130,24 +1132,26 @@ public class TreeReaderFactory {
 
     @Override
     public Object nextVector(Object previousVector, long batchSize) throws IOException {
-      final LongColumnVector result;
+      final TimestampColumnVector result;
       if (previousVector == null) {
-        result = new LongColumnVector();
+        result = new TimestampColumnVector();
       } else {
-        result = (LongColumnVector) previousVector;
+        result = (TimestampColumnVector) previousVector;
       }
 
       result.reset();
-      Object obj = null;
+      if (scratchTimestampWritable == null) {
+        scratchTimestampWritable = new TimestampWritable();
+      }
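+      // Reuse one scratch TimestampWritable across rows rather than allocating
+      // a new writable for every value read.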
+      Object obj;
       for (int i = 0; i < batchSize; i++) {
-        obj = next(obj);
+        obj = next(scratchTimestampWritable);
         if (obj == null) {
           result.noNulls = false;
           result.isNull[i] = true;
         } else {
           TimestampWritable writable = (TimestampWritable) obj;
-          Timestamp timestamp = writable.getTimestamp();
-          result.vector[i] = TimestampUtils.getTimeNanoSec(timestamp);
+          result.set(i, writable.getTimestamp());
         }
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
index 3c0d590..2d73431 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 import java.util.ArrayList;
@@ -278,9 +279,10 @@ public class TypeDescription {
       case SHORT:
       case INT:
       case LONG:
-      case TIMESTAMP:
       case DATE:
         return new LongColumnVector();
+      case TIMESTAMP:
+        return new TimestampColumnVector();
       case FLOAT:
       case DOUBLE:
         return new DoubleColumnVector();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index 21e6ff7..3ff6acf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "4. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  30")
-@VectorizedExpressions({VectorUDFDayOfMonthLong.class, VectorUDFDayOfMonthString.class})
+@VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class})
 public class UDFDayOfMonth extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
index 835cecc..f6f9c93 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  12\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  12")
-@VectorizedExpressions({VectorUDFHourLong.class, VectorUDFHourString.class})
+@VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class})
 public class UDFHour extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
index a9f5393..606b6d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  58\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  58")
-@VectorizedExpressions({VectorUDFMinuteLong.class, VectorUDFMinuteString.class})
+@VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class})
 public class UDFMinute extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 3365804..1975535 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,12 +48,12 @@ import org.apache.hadoop.io.Text;
     + "4. A year-month interval value"
     + "Example:\n"
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  7")
-@VectorizedExpressions({VectorUDFMonthLong.class, VectorUDFMonthString.class})
+@VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class})
 public class UDFMonth extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFMonth() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
index e7c3d67..11764cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
@@ -27,8 +27,9 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -50,7 +51,7 @@ import org.apache.hive.common.util.DateUtils;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  59\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  59")
-@VectorizedExpressions({VectorUDFSecondLong.class, VectorUDFSecondString.class})
+@VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class})
 public class UDFSecond extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat(
       "yyyy-MM-dd HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 9786636..17b892c 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDateToBooleanViaLongToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToBooleanViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -45,7 +45,7 @@ import org.apache.hadoop.io.Text;
  *
  */
 @VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
-  CastDateToBooleanViaLongToLong.class, CastTimestampToBooleanViaLongToLong.class,
+  CastDateToBooleanViaLongToLong.class, CastTimestampToBoolean.class,
   CastDoubleToBooleanViaDoubleToLong.class, CastDecimalToBoolean.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index d274531..159dd0f 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,7 +40,7 @@ import org.apache.hadoop.io.Text;
  * UDFToByte.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToByte extends UDF {
   private final ByteWritable byteWritable = new ByteWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index 8084537..5763947 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToDouble;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -38,7 +38,7 @@ import org.apache.hadoop.io.Text;
  * UDFToDouble.
  *
  */
-@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class,
+@VectorizedExpressions({CastTimestampToDouble.class, CastLongToDouble.class,
     CastDecimalToDouble.class})
 public class UDFToDouble extends UDF {
   private final DoubleWritable doubleWritable = new DoubleWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 129da43..e2183f4 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToDouble;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.Text;
  * UDFToFloat.
  *
  */
-@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class,
+@VectorizedExpressions({CastTimestampToDouble.class, CastLongToDouble.class,
     CastDecimalToDouble.class})
 public class UDFToFloat extends UDF {
   private final FloatWritable floatWritable = new FloatWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index b669754..5f5d1fe 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.ql.io.RecordIdentifier;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.Text;
  * UDFToInteger.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToInteger extends UDF {
   private final IntWritable intWritable = new IntWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
index 04ff7cf..3eeabea 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,7 +40,7 @@ import org.apache.hadoop.io.Text;
  * UDFToLong.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToLong extends UDF {
   private final LongWritable longWritable = new LongWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index 5315552..b9065b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.Text;
  * UDFToShort.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToShort extends UDF {
   ShortWritable shortWritable = new ShortWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index f076d1d..d65b3ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -44,7 +45,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2008-02-20') FROM src LIMIT 1;\n"
     + "  8\n"
     + "  > SELECT _FUNC_('1980-12-31 12:59:59') FROM src LIMIT 1;\n" + "  1")
-@VectorizedExpressions({VectorUDFWeekOfYearLong.class, VectorUDFWeekOfYearString.class})
+@VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class})
 public class UDFWeekOfYear extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index 34b0c47..92ee700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "4. A year-month interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  2009")
-@VectorizedExpressions({VectorUDFYearLong.class, VectorUDFYearString.class})
+@VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class})
 public class UDFYear extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index 10e648e..8c376a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -50,7 +51,7 @@ import org.apache.hadoop.io.Text;
     extended = "Example:\n "
         + "  > SELECT _FUNC_('2009-07-30 04:17:52') FROM src LIMIT 1;\n"
         + "  '2009-07-30'")
-@VectorizedExpressions({VectorUDFDateString.class, VectorUDFDateLong.class})
+@VectorizedExpressions({VectorUDFDateString.class, VectorUDFDateLong.class, VectorUDFDateTimestamp.class})
 public class GenericUDFDate extends GenericUDF {
   private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient TimestampConverter timestampConverter;

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
index 568fd46..b5b7153 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnLongScalar;
@@ -42,12 +42,20 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLon
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeColumnColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeColumnScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarCharScalar;
@@ -76,6 +84,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStri
   IfExprLongScalarDoubleColumn.class, IfExprDoubleScalarLongColumn.class,
   IfExprLongScalarLongScalar.class, IfExprDoubleScalarDoubleScalar.class,
   IfExprLongScalarDoubleScalar.class, IfExprDoubleScalarLongScalar.class,
+
   IfExprStringGroupColumnStringGroupColumn.class,
   IfExprStringGroupColumnStringScalar.class,
   IfExprStringGroupColumnCharScalar.class, IfExprStringGroupColumnVarCharScalar.class,
@@ -83,7 +92,12 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStri
   IfExprCharScalarStringGroupColumn.class, IfExprVarCharScalarStringGroupColumn.class,
   IfExprStringScalarStringScalar.class,
   IfExprStringScalarCharScalar.class, IfExprStringScalarVarCharScalar.class,
-  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class
+  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class,
+
+  IfExprIntervalDayTimeColumnColumn.class, IfExprIntervalDayTimeColumnScalar.class,
+  IfExprIntervalDayTimeScalarColumn.class, IfExprIntervalDayTimeScalarScalar.class,
+  IfExprTimestampColumnColumn.class, IfExprTimestampColumnScalar.class,
+  IfExprTimestampScalarColumn.class, IfExprTimestampScalarScalar.class,
 })
 public class GenericUDFIf extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
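
The hunks above all follow the same registration pattern: the @VectorizedExpressions annotation on a row-mode UDF enumerates the vectorized expression classes the planner may substitute for it, so the timestamp fix largely swaps the old Long-based implementations for the new Timestamp and IntervalDayTime ones. As a hedged, self-contained sketch of that mechanism (the annotation and class names below are illustrative stand-ins, not Hive's):

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;

public class AnnotationLookupExample {
  // Stand-in for Hive's VectorizedExpressions annotation.
  @Retention(RetentionPolicy.RUNTIME)
  @interface VectorizedImpls { Class<?>[] value(); }

  // Stand-ins for vectorized expression classes.
  static class ExampleYearTimestamp {}
  static class ExampleYearString {}

  @VectorizedImpls({ExampleYearTimestamp.class, ExampleYearString.class})
  static class ExampleYearUdf {}

  public static void main(String[] args) {
    VectorizedImpls anno = ExampleYearUdf.class.getAnnotation(VectorizedImpls.class);
    // The planner reads the list reflectively and picks the implementation
    // whose argument descriptor matches the input column vector types.
    System.out.println(Arrays.toString(anno.value()));
  }
}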


[13/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
new file mode 100644
index 0000000..c0dd5ed
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
@@ -0,0 +1,395 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector;
+
+import java.sql.Timestamp;
+import java.util.Arrays;
+
+import org.apache.hadoop.io.Writable;
+
+/**
+ * This class represents a nullable timestamp column vector capable of handling a wide range of
+ * timestamp values.
+ *
+ * We store the two value fields of a Timestamp (milliseconds and nanoseconds) in primitive arrays.
+ *
+ * We do this to avoid an array of Java Timestamp objects which would have poor storage
+ * and memory access characteristics.
+ *
+ * Generally, the caller will fill in a scratch timestamp object with values from a row, work
+ * using the scratch timestamp, and then perhaps update the column vector row with a result.
+ */
+public class TimestampColumnVector extends ColumnVector {
+
+  /*
+   * The storage arrays for this column vector correspond to the storage of a Timestamp:
+   */
+  public long[] time;
+      // The values from Timestamp.getTime().
+
+  public int[] nanos;
+      // The values from Timestamp.getNanos().
+
+  /*
+   * Scratch objects.
+   */
+  private final Timestamp scratchTimestamp;
+
+  private Writable scratchWritable;
+      // Supports keeping a TimestampWritable object without having to import that definition...
+
+  /**
+   * Use this constructor by default. All column vectors
+   * should normally be the default size.
+   */
+  public TimestampColumnVector() {
+    this(VectorizedRowBatch.DEFAULT_SIZE);
+  }
+
+  /**
+   * Don't use this except for testing purposes.
+   *
+   * @param len the number of rows
+   */
+  public TimestampColumnVector(int len) {
+    super(len);
+
+    time = new long[len];
+    nanos = new int[len];
+
+    scratchTimestamp = new Timestamp(0);
+
+    scratchWritable = null;     // Allocated by caller.
+  }
+
+  /**
+   * Return the number of rows.
+   * @return
+   */
+  public int getLength() {
+    return time.length;
+  }
+
+  /**
+   * Return a row's Timestamp.getTime() value.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public long getTime(int elementNum) {
+    return time[elementNum];
+  }
+
+  /**
+   * Return a row's Timestamp.getNanos() value.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public int getNanos(int elementNum) {
+    return nanos[elementNum];
+  }
+
+  /**
+   * Set a Timestamp object from a row of the column.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param timestamp
+   * @param elementNum
+   */
+  public void timestampUpdate(Timestamp timestamp, int elementNum) {
+    timestamp.setTime(time[elementNum]);
+    timestamp.setNanos(nanos[elementNum]);
+  }
+
+  /**
+   * Return the scratch Timestamp object set from a row.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @return
+   */
+  public Timestamp asScratchTimestamp(int elementNum) {
+    scratchTimestamp.setTime(time[elementNum]);
+    scratchTimestamp.setNanos(nanos[elementNum]);
+    return scratchTimestamp;
+  }
+
+  /**
+   * Return the scratch timestamp (contents undefined).
+   * @return
+   */
+  public Timestamp getScratchTimestamp() {
+    return scratchTimestamp;
+  }
+
+  /**
+   * Return a long representation of a Timestamp (whole seconds since the epoch).
+   * @param elementNum
+   * @return
+   */
+  public long getTimestampAsLong(int elementNum) {
+    scratchTimestamp.setTime(time[elementNum]);
+    scratchTimestamp.setNanos(nanos[elementNum]);
+    return getTimestampAsLong(scratchTimestamp);
+  }
+
+  /**
+   * Return a long representation of a Timestamp (whole seconds since the epoch).
+   * @param timestamp
+   * @return
+   */
+  public static long getTimestampAsLong(Timestamp timestamp) {
+    return millisToSeconds(timestamp.getTime());
+  }
+
+  // Copy of TimestampWritable.millisToSeconds
+  /**
+   * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of
+   * seconds. 500 would round to 0, -500 would round to -1.
+   */
+  private static long millisToSeconds(long millis) {
+    if (millis >= 0) {
+      return millis / 1000;
+    } else {
+      return (millis - 999) / 1000;
+    }
+  }
+
+  /**
+   * Return a double representation of a Timestamp (seconds, with nanoseconds as the fraction).
+   * @param elementNum
+   * @return
+   */
+  public double getDouble(int elementNum) {
+    scratchTimestamp.setTime(time[elementNum]);
+    scratchTimestamp.setNanos(nanos[elementNum]);
+    return getDouble(scratchTimestamp);
+  }
+
+  /**
+   * Return a double representation of a Timestamp (seconds, with nanoseconds as the fraction).
+   * @param timestamp
+   * @return
+   */
+  public static double getDouble(Timestamp timestamp) {
+    // Same algorithm as TimestampWritable (not currently import-able here).
+    double seconds, nanos;
+    seconds = millisToSeconds(timestamp.getTime());
+    nanos = timestamp.getNanos();
+    return seconds + nanos / 1000000000;
+  }
+
+  /**
+   * Compare row to Timestamp.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param elementNum
+   * @param timestamp
+   * @return -1, 0, 1 standard compareTo values.
+   */
+  public int compareTo(int elementNum, Timestamp timestamp) {
+    return asScratchTimestamp(elementNum).compareTo(timestamp);
+  }
+
+  /**
+   * Compare Timestamp to row.
+   * We assume the entry has already been NULL checked and isRepeated adjusted.
+   * @param timestamp
+   * @param elementNum
+   * @return -1, 0, 1 standard compareTo values.
+   */
+  public int compareTo(Timestamp timestamp, int elementNum) {
+    return timestamp.compareTo(asScratchTimestamp(elementNum));
+  }
+
+  /**
+   * Compare a row to another TimestampColumnVector's row.
+   * @param elementNum1
+   * @param timestampColVector2
+   * @param elementNum2
+   * @return
+   */
+  public int compareTo(int elementNum1, TimestampColumnVector timestampColVector2,
+      int elementNum2) {
+    return asScratchTimestamp(elementNum1).compareTo(
+        timestampColVector2.asScratchTimestamp(elementNum2));
+  }
+
+  /**
+   * Compare another TimestampColumnVector's row to a row.
+   * @param timestampColVector1
+   * @param elementNum1
+   * @param elementNum2
+   * @return
+   */
+  public int compareTo(TimestampColumnVector timestampColVector1, int elementNum1,
+      int elementNum2) {
+    return timestampColVector1.asScratchTimestamp(elementNum1).compareTo(
+        asScratchTimestamp(elementNum2));
+  }
+
+  @Override
+  public void setElement(int outElementNum, int inputElementNum, ColumnVector inputVector) {
+
+    TimestampColumnVector timestampColVector = (TimestampColumnVector) inputVector;
+
+    time[outElementNum] = timestampColVector.time[inputElementNum];
+    nanos[outElementNum] = timestampColVector.nanos[inputElementNum];
+  }
+
+  // Simplify vector by brute-force flattening noNulls and isRepeating
+  // This can be used to reduce combinatorial explosion of code paths in VectorExpressions
+  // with many arguments.
+  public void flatten(boolean selectedInUse, int[] sel, int size) {
+    flattenPush();
+    if (isRepeating) {
+      isRepeating = false;
+      long repeatFastTime = time[0];
+      int repeatNanos = nanos[0];
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          time[i] = repeatFastTime;
+          nanos[i] = repeatNanos;
+        }
+      } else {
+        Arrays.fill(time, 0, size, repeatFastTime);
+        Arrays.fill(nanos, 0, size, repeatNanos);
+      }
+      flattenRepeatingNulls(selectedInUse, sel, size);
+    }
+    flattenNoNulls(selectedInUse, sel, size);
+  }
+
+  /**
+   * Set a row from a timestamp.
+   * We assume the entry has already been isRepeated adjusted.
+   * @param elementNum
+   * @param timestamp
+   */
+  public void set(int elementNum, Timestamp timestamp) {
+    this.time[elementNum] = timestamp.getTime();
+    this.nanos[elementNum] = timestamp.getNanos();
+  }
+
+  /**
+   * Set a row from the current value in the scratch timestamp.
+   * @param elementNum
+   */
+  public void setFromScratchTimestamp(int elementNum) {
+    this.time[elementNum] = scratchTimestamp.getTime();
+    this.nanos[elementNum] = scratchTimestamp.getNanos();
+  }
+
+  /**
+   * Set row to standard null value(s).
+   * We assume the entry has already been isRepeated adjusted.
+   * @param elementNum
+   */
+  public void setNullValue(int elementNum) {
+    time[elementNum] = 0;
+    nanos[elementNum] = 1;
+  }
+
+  // Copy the current object contents into the output. Only copy selected entries,
+  // as indicated by selectedInUse and the sel array.
+  public void copySelected(
+      boolean selectedInUse, int[] sel, int size, TimestampColumnVector output) {
+
+    // Output has nulls if and only if input has nulls.
+    output.noNulls = noNulls;
+    output.isRepeating = false;
+
+    // Handle repeating case
+    if (isRepeating) {
+      output.time[0] = time[0];
+      output.nanos[0] = nanos[0];
+      output.isNull[0] = isNull[0];
+      output.isRepeating = true;
+      return;
+    }
+
+    // Handle normal case
+
+    // Copy data values over
+    if (selectedInUse) {
+      for (int j = 0; j < size; j++) {
+        int i = sel[j];
+        output.time[i] = time[i];
+        output.nanos[i] = nanos[i];
+      }
+    }
+    else {
+      System.arraycopy(time, 0, output.time, 0, size);
+      System.arraycopy(nanos, 0, output.nanos, 0, size);
+    }
+
+    // Copy nulls over if needed
+    if (!noNulls) {
+      if (selectedInUse) {
+        for (int j = 0; j < size; j++) {
+          int i = sel[j];
+          output.isNull[i] = isNull[i];
+        }
+      }
+      else {
+        System.arraycopy(isNull, 0, output.isNull, 0, size);
+      }
+    }
+  }
+
+  /**
+   * Fill all the vector entries with a timestamp.
+   * @param timestamp
+   */
+  public void fill(Timestamp timestamp) {
+    noNulls = true;
+    isRepeating = true;
+    time[0] = timestamp.getTime();
+    nanos[0] = timestamp.getNanos();
+  }
+
+  /**
+   * Return a convenience writable object stored by this column vector.
+   * Supports keeping a TimestampWritable object without having to import that definition...
+   * @return
+   */
+  public Writable getScratchWritable() {
+    return scratchWritable;
+  }
+
+  /**
+   * Set the convenience writable object stored by this column vector
+   * @param scratchWritable
+   */
+  public void setScratchWritable(Writable scratchWritable) {
+    this.scratchWritable = scratchWritable;
+  }
+
+  @Override
+  public void stringifyValue(StringBuilder buffer, int row) {
+    if (isRepeating) {
+      row = 0;
+    }
+    if (noNulls || !isNull[row]) {
+      scratchTimestamp.setTime(time[row]);
+      scratchTimestamp.setNanos(nanos[row]);
+      buffer.append(scratchTimestamp.toString());
+    } else {
+      buffer.append("null");
+    }
+  }
+}
\ No newline at end of file
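
As a hedged usage sketch of the scratch-timestamp pattern described in the class Javadoc above, assuming only the class added by this patch and java.sql.Timestamp (the long form rounds toward negative infinity per millisToSeconds: 500 ms rounds to 0 s, -500 ms to -1 s):

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class TimestampColumnVectorExample {
  public static void main(String[] args) {
    TimestampColumnVector col = new TimestampColumnVector(4);

    // set() copies getTime()/getNanos() into the two primitive arrays.
    col.set(0, Timestamp.valueOf("2016-03-01 12:00:00.123456789"));

    // Reads go through the shared scratch Timestamp, avoiding per-row allocation.
    Timestamp scratch = col.asScratchTimestamp(0);
    System.out.println(scratch);                    // 2016-03-01 12:00:00.123456789

    // Long form: whole seconds since the epoch.
    System.out.println(col.getTimestampAsLong(0));

    // Double form: seconds plus the nanosecond part as a fraction.
    System.out.println(col.getDouble(0));
  }
}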

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
index 95dbf8d..bb795fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
@@ -18,50 +18,32 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import java.sql.Timestamp;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public final class TimestampUtils {
 
-  /**
-   * Store the given timestamp in nanoseconds into the timestamp object.
-   * @param timeInNanoSec Given timestamp in nanoseconds
-   * @param t             The timestamp object
-   */
-  public static void assignTimeInNanoSec(long timeInNanoSec, Timestamp t) {
-    /*
-     * java.sql.Timestamp consists of a long variable to store milliseconds and an integer variable for nanoseconds.
-     * The long variable is used to store only the full seconds converted to millis. For example for 1234 milliseconds,
-     * 1000 is stored in the long variable, and 234000000 (234 converted to nanoseconds) is stored as nanoseconds.
-     * The negative timestamps are also supported, but nanoseconds must be positive therefore millisecond part is
-     * reduced by one second.
-     */
-    long integralSecInMillis = (timeInNanoSec / 1000000000) * 1000; // Full seconds converted to millis.
-    long nanos = timeInNanoSec % 1000000000; // The nanoseconds.
-    if (nanos < 0) {
-      nanos = 1000000000 + nanos; // The positive nano-part that will be added to milliseconds.
-      integralSecInMillis = ((timeInNanoSec / 1000000000) - 1) * 1000; // Reduce by one second.
-    }
-    t.setTime(integralSecInMillis);
-    t.setNanos((int) nanos);
-  }
-
-  public static long getTimeNanoSec(Timestamp t) {
-    long time = t.getTime();
-    int nanos = t.getNanos();
-    return (time * 1000000) + (nanos % 1000000);
-  }
+  static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
+  static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
 
-  public static long secondsToNanoseconds(long seconds) {
-    return seconds * 1000000000;
+  public static long daysToNanoseconds(long daysSinceEpoch) {
+    return DateWritable.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
   }
 
-  public static long doubleToNanoseconds(double d) {
-    return (long) (d * 1000000000);
+  public static TimestampWritable timestampColumnVectorWritable(
+      TimestampColumnVector timestampColVector, int elementNum,
+      TimestampWritable timestampWritable) {
+    timestampWritable.set(timestampColVector.asScratchTimestamp(elementNum));
+    return timestampWritable;
   }
 
-  public static long daysToNanoseconds(long daysSinceEpoch) {
-    return DateWritable.daysToMillis((int) daysSinceEpoch) * 1000000;
+  public static HiveIntervalDayTimeWritable intervalDayTimeColumnVectorWritable(
+      IntervalDayTimeColumnVector intervalDayTimeColVector, int elementNum,
+      HiveIntervalDayTimeWritable intervalDayTimeWritable) {
+    intervalDayTimeWritable.set(intervalDayTimeColVector.asScratchIntervalDayTime(elementNum));
+    return intervalDayTimeWritable;
   }
 }
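
A minimal sketch of the day-to-nanoseconds helper above, under the assumption that DateWritable.daysToMillis applies a local-time adjustment during the epoch-day conversion (so the printed value is exact only when the local zone matches UTC):

import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;

public class TimestampUtilsExample {
  public static void main(String[] args) {
    // One day past the epoch: 86,400,000 ms scaled by 1,000,000 ns/ms.
    long ns = TimestampUtils.daysToNanoseconds(1);
    System.out.println(ns); // 86400000000000 when the local zone matches UTC
  }
}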

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index 809d7d4..9502134 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -228,7 +227,26 @@ public abstract class VectorAssignRow {
     }
   }
 
-  private class TimestampAssigner extends AbstractLongAssigner {
+  private abstract class AbstractTimestampAssigner extends Assigner {
+
+    protected TimestampColumnVector colVector;
+
+    AbstractTimestampAssigner(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (TimestampColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class TimestampAssigner extends AbstractTimestampAssigner {
 
     TimestampAssigner(int columnIndex) {
       super(columnIndex);
@@ -239,9 +257,8 @@ public abstract class VectorAssignRow {
       if (object == null) {
         VectorizedBatchUtil.setNullColIsNullValue(colVector, batchIndex);
       } else {
-        TimestampWritable tw = (TimestampWritable) object;
-        Timestamp t = tw.getTimestamp();
-        vector[batchIndex] = TimestampUtils.getTimeNanoSec(t);
+        colVector.set(batchIndex, ((TimestampWritable) object).getTimestamp());
+        colVector.isNull[batchIndex] = false;
       }
     }
   }
@@ -260,11 +277,31 @@ public abstract class VectorAssignRow {
         HiveIntervalYearMonthWritable iymw = (HiveIntervalYearMonthWritable) object;
         HiveIntervalYearMonth iym = iymw.getHiveIntervalYearMonth();
         vector[batchIndex] = iym.getTotalMonths();
+        colVector.isNull[batchIndex] = false;
       }
     }
   }
 
-  private class IntervalDayTimeAssigner extends AbstractLongAssigner {
+  private abstract class AbstractIntervalDayTimeAssigner extends Assigner {
+
+    protected IntervalDayTimeColumnVector colVector;
+
+    AbstractIntervalDayTimeAssigner(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class IntervalDayTimeAssigner extends AbstractIntervalDayTimeAssigner {
 
     IntervalDayTimeAssigner(int columnIndex) {
       super(columnIndex);
@@ -277,7 +314,8 @@ public abstract class VectorAssignRow {
       } else {
         HiveIntervalDayTimeWritable idtw = (HiveIntervalDayTimeWritable) object;
         HiveIntervalDayTime idt = idtw.getHiveIntervalDayTime();
-        vector[batchIndex] = DateUtils.getIntervalDayTimeTotalNanos(idt);
+        colVector.set(batchIndex, idt);
+        colVector.isNull[batchIndex] = false;
       }
     }
   }
@@ -317,6 +355,7 @@ public abstract class VectorAssignRow {
       } else {
         FloatWritable fw = (FloatWritable) object;
         vector[batchIndex] = fw.get();
+        colVector.isNull[batchIndex] = false;
       }
     }
   }
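
Several hunks above add an explicit "colVector.isNull[batchIndex] = false;" after each non-null assignment. A hedged sketch of why, assuming batches are recycled across rows: once noNulls is false, readers consult isNull per row, so a stale true flag left over from an earlier row would shadow a freshly written value. The helper below is illustrative, not Hive code:

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class NullFlagExample {
  static void assign(TimestampColumnVector col, int batchIndex, Timestamp value) {
    if (value == null) {
      // Mirrors VectorizedBatchUtil.setNullColIsNullValue: mark the row null.
      col.noNulls = false;
      col.isNull[batchIndex] = true;
    } else {
      col.set(batchIndex, value);      // copy time + nanos into the arrays
      col.isNull[batchIndex] = false;  // clear any stale flag from a prior row
    }
  }
}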

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
index befe2fc..96b8f78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
@@ -25,6 +25,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -165,6 +166,27 @@ public class VectorColumnAssignFactory {
     }
   }
 
+  private static abstract class VectorTimestampColumnAssign
+  extends VectorColumnAssignVectorBase<TimestampColumnVector> {
+
+    protected void assignTimestamp(Timestamp value, int index) {
+      outCol.set(index, value);
+    }
+    protected void assignTimestamp(TimestampWritable tw, int index) {
+      outCol.set(index, tw.getTimestamp());
+    }
+  }
+
+  private static abstract class VectorIntervalDayTimeColumnAssign
+  extends VectorColumnAssignVectorBase<IntervalDayTimeColumnVector> {
+
+    protected void assignIntervalDayTime(HiveIntervalDayTime value, int index) {
+      outCol.set(index, value);
+    }
+    protected void assignIntervalDayTime(HiveIntervalDayTimeWritable tw, int index) {
+      outCol.set(index, tw.getHiveIntervalDayTime());
+    }
+  }
 
   public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch)
       throws HiveException {
@@ -313,19 +335,17 @@ public class VectorColumnAssignFactory {
         }.init(outputBatch, (LongColumnVector) destCol);
         break;
       case TIMESTAMP:
-        outVCA = new VectorLongColumnAssign() {
+        outVCA = new VectorTimestampColumnAssign() {
           @Override
           public void assignObjectValue(Object val, int destIndex) throws HiveException {
             if (val == null) {
               assignNull(destIndex);
             }
             else {
-              TimestampWritable bw = (TimestampWritable) val;
-              Timestamp t = bw.getTimestamp();
-              assignLong(TimestampUtils.getTimeNanoSec(t), destIndex);
+              assignTimestamp((TimestampWritable) val, destIndex);
             }
           }
-        }.init(outputBatch, (LongColumnVector) destCol);
+        }.init(outputBatch, (TimestampColumnVector) destCol);
         break;
       case DATE:
         outVCA = new VectorLongColumnAssign() {
@@ -355,7 +375,7 @@ public class VectorColumnAssignFactory {
           }
         }.init(outputBatch, (LongColumnVector) destCol);
         break;
-      case INTERVAL_DAY_TIME:outVCA = new VectorLongColumnAssign() {
+      case INTERVAL_DAY_TIME:outVCA = new VectorIntervalDayTimeColumnAssign() {
         @Override
         public void assignObjectValue(Object val, int destIndex) throws HiveException {
           if (val == null) {
@@ -363,12 +383,12 @@ public class VectorColumnAssignFactory {
           }
           else {
             HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
-            assignLong(
-                DateUtils.getIntervalDayTimeTotalNanos(bw.getHiveIntervalDayTime()),
+            assignIntervalDayTime(
+                bw.getHiveIntervalDayTime(),
                 destIndex);
           }
         }
-      }.init(outputBatch, (LongColumnVector) destCol);
+      }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
       break;
       default:
         throw new HiveException("Incompatible Long vector column and primitive category " +

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
index 8c4b6ea..935b47b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 /**
  * Class to keep information on a set of typed vector columns.  Used by
@@ -52,6 +55,16 @@ public class VectorColumnSetInfo {
   protected int[] decimalIndices;
 
   /**
+   * indices of TIMESTAMP primitive keys.
+   */
+  protected int[] timestampIndices;
+
+  /**
+   * indices of INTERVAL_DAY_TIME primitive keys.
+   */
+  protected int[] intervalDayTimeIndices;
+
+  /**
    * Helper class for looking up a key value based on key index.
    */
   public class KeyLookupHelper {
@@ -59,11 +72,14 @@ public class VectorColumnSetInfo {
     public int doubleIndex;
     public int stringIndex;
     public int decimalIndex;
+    public int timestampIndex;
+    public int intervalDayTimeIndex;
 
     private static final int INDEX_UNUSED = -1;
 
     private void resetIndices() {
-        this.longIndex = this.doubleIndex = this.stringIndex = this.decimalIndex = INDEX_UNUSED;
+        this.longIndex = this.doubleIndex = this.stringIndex = this.decimalIndex =
+            timestampIndex = intervalDayTimeIndex = INDEX_UNUSED;
     }
     public void setLong(int index) {
       resetIndices();
@@ -84,6 +100,16 @@ public class VectorColumnSetInfo {
       resetIndices();
       this.decimalIndex = index;
     }
+
+    public void setTimestamp(int index) {
+      resetIndices();
+      this.timestampIndex = index;
+    }
+
+    public void setIntervalDayTime(int index) {
+      resetIndices();
+      this.intervalDayTimeIndex = index;
+    }
   }
 
   /**
@@ -98,6 +124,8 @@ public class VectorColumnSetInfo {
   protected int doubleIndicesIndex;
   protected int stringIndicesIndex;
   protected int decimalIndicesIndex;
+  protected int timestampIndicesIndex;
+  protected int intervalDayTimeIndicesIndex;
 
   protected VectorColumnSetInfo(int keyCount) {
     this.keyCount = keyCount;
@@ -112,33 +140,62 @@ public class VectorColumnSetInfo {
     stringIndicesIndex = 0;
     decimalIndices = new int[this.keyCount];
     decimalIndicesIndex = 0;
+    timestampIndices = new int[this.keyCount];
+    timestampIndicesIndex = 0;
+    intervalDayTimeIndices = new int[this.keyCount];
+    intervalDayTimeIndicesIndex = 0;
     indexLookup = new KeyLookupHelper[this.keyCount];
   }
 
   protected void addKey(String outputType) throws HiveException {
     indexLookup[addIndex] = new KeyLookupHelper();
-    if (VectorizationContext.isIntFamily(outputType) ||
-        VectorizationContext.isDatetimeFamily(outputType)) {
+
+    String typeName = VectorizationContext.mapTypeNameSynonyms(outputType);
+
+    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
+    Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
+
+    switch (columnVectorType) {
+    case LONG:
       longIndices[longIndicesIndex] = addIndex;
       indexLookup[addIndex].setLong(longIndicesIndex);
       ++longIndicesIndex;
-    } else if (VectorizationContext.isFloatFamily(outputType)) {
+      break;
+
+    case DOUBLE:
       doubleIndices[doubleIndicesIndex] = addIndex;
       indexLookup[addIndex].setDouble(doubleIndicesIndex);
       ++doubleIndicesIndex;
-    } else if (VectorizationContext.isStringFamily(outputType) ||
-        outputType.equalsIgnoreCase("binary")) {
+      break;
+
+    case BYTES:
       stringIndices[stringIndicesIndex]= addIndex;
       indexLookup[addIndex].setString(stringIndicesIndex);
       ++stringIndicesIndex;
-    } else if (VectorizationContext.isDecimalFamily(outputType)) {
-        decimalIndices[decimalIndicesIndex]= addIndex;
-        indexLookup[addIndex].setDecimal(decimalIndicesIndex);
-        ++decimalIndicesIndex;
-    }
-    else {
-      throw new HiveException("Unsuported vector output type: " + outputType);
+      break;
+
+    case DECIMAL:
+      decimalIndices[decimalIndicesIndex]= addIndex;
+      indexLookup[addIndex].setDecimal(decimalIndicesIndex);
+      ++decimalIndicesIndex;
+      break;
+
+    case TIMESTAMP:
+      timestampIndices[timestampIndicesIndex] = addIndex;
+      indexLookup[addIndex].setTimestamp(timestampIndicesIndex);
+      ++timestampIndicesIndex;
+      break;
+
+    case INTERVAL_DAY_TIME:
+      intervalDayTimeIndices[intervalDayTimeIndicesIndex] = addIndex;
+      indexLookup[addIndex].setIntervalDayTime(intervalDayTimeIndicesIndex);
+      ++intervalDayTimeIndicesIndex;
+      break;
+
+    default:
+      throw new HiveException("Unexpected column vector type " + columnVectorType);
     }
+
     addIndex++;
   }
 
@@ -147,5 +204,7 @@ public class VectorColumnSetInfo {
     doubleIndices = Arrays.copyOf(doubleIndices, doubleIndicesIndex);
     stringIndices = Arrays.copyOf(stringIndices, stringIndicesIndex);
     decimalIndices = Arrays.copyOf(decimalIndices, decimalIndicesIndex);
+    timestampIndices = Arrays.copyOf(timestampIndices, timestampIndicesIndex);
+    intervalDayTimeIndices = Arrays.copyOf(intervalDayTimeIndices, intervalDayTimeIndicesIndex);
   }
 }
\ No newline at end of file
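
The rewritten addKey above routes each key by ColumnVector.Type instead of string-matching the type name. A hedged sketch of that classification step in isolation, using only the calls visible in the hunk:

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ColumnVectorTypeExample {
  public static void main(String[] args) throws Exception {
    for (String typeName : new String[] {"int", "timestamp", "interval_day_time"}) {
      TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
      Type type = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
      // Expected buckets per the switch above: LONG, TIMESTAMP, INTERVAL_DAY_TIME.
      System.out.println(typeName + " -> " + type);
    }
  }
}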

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
index 0058141..97b1f24 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
@@ -20,7 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 /**
  * This class copies specified columns of a row from one VectorizedRowBatch to another.
  */
@@ -183,10 +186,64 @@ public class VectorCopyRow {
     }
   }
 
+  private class TimestampCopyRow extends CopyRow {
+
+    TimestampCopyRow(int inColumnIndex, int outColumnIndex) {
+      super(inColumnIndex, outColumnIndex);
+    }
+
+    @Override
+    void copy(VectorizedRowBatch inBatch, int inBatchIndex, VectorizedRowBatch outBatch, int outBatchIndex) {
+      TimestampColumnVector inColVector = (TimestampColumnVector) inBatch.cols[inColumnIndex];
+      TimestampColumnVector outColVector = (TimestampColumnVector) outBatch.cols[outColumnIndex];
+
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.setElement(outBatchIndex, 0, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.setElement(outBatchIndex, inBatchIndex, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
+  }
+
+  private class IntervalDayTimeCopyRow extends CopyRow {
+
+    IntervalDayTimeCopyRow(int inColumnIndex, int outColumnIndex) {
+      super(inColumnIndex, outColumnIndex);
+    }
+
+    @Override
+    void copy(VectorizedRowBatch inBatch, int inBatchIndex, VectorizedRowBatch outBatch, int outBatchIndex) {
+      IntervalDayTimeColumnVector inColVector = (IntervalDayTimeColumnVector) inBatch.cols[inColumnIndex];
+      IntervalDayTimeColumnVector outColVector = (IntervalDayTimeColumnVector) outBatch.cols[outColumnIndex];
+
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.setElement(outBatchIndex, 0, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.setElement(outBatchIndex, inBatchIndex, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
+  }
+
   private CopyRow[] subRowToBatchCopiersByValue;
   private CopyRow[] subRowToBatchCopiersByReference;
 
-  public void init(VectorColumnMapping columnMapping) {
+  public void init(VectorColumnMapping columnMapping) throws HiveException {
     int count = columnMapping.getCount();
     subRowToBatchCopiersByValue = new CopyRow[count];
     subRowToBatchCopiersByReference = new CopyRow[count];
@@ -194,24 +251,43 @@ public class VectorCopyRow {
     for (int i = 0; i < count; i++) {
       int inputColumn = columnMapping.getInputColumns()[i];
       int outputColumn = columnMapping.getOutputColumns()[i];
-      String typeName = columnMapping.getTypeNames()[i];
+      String typeName = columnMapping.getTypeNames()[i].toLowerCase();
+      TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
+      Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
 
       CopyRow copyRowByValue = null;
       CopyRow copyRowByReference = null;
 
-      if (VectorizationContext.isIntFamily(typeName) ||
-          VectorizationContext.isDatetimeFamily(typeName)) {
+      switch (columnVectorType) {
+      case LONG:
         copyRowByValue = new LongCopyRow(inputColumn, outputColumn);
-      } else if (VectorizationContext.isFloatFamily(typeName)) {
+        break;
+
+      case TIMESTAMP:
+        copyRowByValue = new TimestampCopyRow(inputColumn, outputColumn);
+        break;
+
+      case INTERVAL_DAY_TIME:
+        copyRowByValue = new IntervalDayTimeCopyRow(inputColumn, outputColumn);
+        break;
+
+      case DOUBLE:
         copyRowByValue = new DoubleCopyRow(inputColumn, outputColumn);
-      } else if (VectorizationContext.isStringFamily(typeName)) {
+        break;
+
+      case BYTES:
         copyRowByValue = new BytesCopyRowByValue(inputColumn, outputColumn);
         copyRowByReference = new BytesCopyRowByReference(inputColumn, outputColumn);
-      } else if (VectorizationContext.decimalTypePattern.matcher(typeName).matches()){
+        break;
+
+      case DECIMAL:
         copyRowByValue = new DecimalCopyRow(inputColumn, outputColumn);
-      } else {
-        throw new RuntimeException("Cannot allocate vector copy row for " + typeName);
+        break;
+
+      default:
+        throw new HiveException("Unexpected column vector type " + columnVectorType);
       }
+
       subRowToBatchCopiersByValue[i] = copyRowByValue;
       if (copyRowByReference == null) {
         subRowToBatchCopiersByReference[i] = copyRowByValue;
@@ -243,4 +319,19 @@ public class VectorCopyRow {
       copyRow.copy(inBatch, inBatchIndex, outBatch, outBatchIndex);
     }
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("VectorCopyRow ");
+    // Separate copier entries with ", ", skipping the first entry; checking
+    // sb.length() would always succeed because of the "VectorCopyRow " prefix.
+    boolean isFirst = true;
+    for (CopyRow copyRow : subRowToBatchCopiersByValue) {
+      if (isFirst) {
+        isFirst = false;
+      } else {
+        sb.append(", ");
+      }
+      sb.append(copyRow.getClass().getName());
+      sb.append(" inColumnIndex " + copyRow.inColumnIndex);
+      sb.append(" outColumnIndex " + copyRow.outColumnIndex);
+    }
+    return sb.toString();
+  }
+}
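
Both new copy classes read index 0 whenever the input vector isRepeating, since a repeating vector stores its single value in row 0. A hedged sketch of that convention using the fill() method from the TimestampColumnVector added earlier in this patch:

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class RepeatingVectorExample {
  public static void main(String[] args) {
    TimestampColumnVector col = new TimestampColumnVector(1024);

    // fill() sets isRepeating and stores the value once, in row 0.
    col.fill(Timestamp.valueOf("2016-03-01 00:00:00"));

    System.out.println(col.isRepeating);            // true
    System.out.println(col.asScratchTimestamp(0));  // 2016-03-01 00:00:00.0
  }
}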

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 56cf9ba..0ff5083 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.io.EOFException;
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -203,7 +202,14 @@ public class VectorDeserializeRow {
     }
   }
 
-  private class TimestampReader extends AbstractLongReader {
+  private abstract class AbstractTimestampReader extends Reader {
+
+    AbstractTimestampReader(int columnIndex) {
+      super(columnIndex);
+    }
+  }
+
+  private class TimestampReader extends AbstractTimestampReader {
 
     DeserializeRead.ReadTimestampResults readTimestampResults;
 
@@ -214,16 +220,17 @@ public class VectorDeserializeRow {
 
     @Override
     void apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
+      TimestampColumnVector colVector = (TimestampColumnVector) batch.cols[columnIndex];
 
       if (deserializeRead.readCheckNull()) {
         VectorizedBatchUtil.setNullColIsNullValue(colVector, batchIndex);
       } else {
         deserializeRead.readTimestamp(readTimestampResults);
-        Timestamp t = readTimestampResults.getTimestamp();
-        colVector.vector[batchIndex] = TimestampUtils.getTimeNanoSec(t);
+        colVector.set(batchIndex, readTimestampResults.getTimestamp());
+        colVector.isNull[batchIndex] = false;
       }
     }
+
   }
 
   private class IntervalYearMonthReader extends AbstractLongReader {
@@ -249,7 +256,14 @@ public class VectorDeserializeRow {
     }
   }
 
-  private class IntervalDayTimeReader extends AbstractLongReader {
+  private abstract class AbstractIntervalDayTimeReader extends Reader {
+
+    AbstractIntervalDayTimeReader(int columnIndex) {
+      super(columnIndex);
+    }
+  }
+
+  private class IntervalDayTimeReader extends AbstractIntervalDayTimeReader {
 
     DeserializeRead.ReadIntervalDayTimeResults readIntervalDayTimeResults;
 
@@ -260,14 +274,15 @@ public class VectorDeserializeRow {
 
     @Override
     void apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
+      IntervalDayTimeColumnVector colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
 
       if (deserializeRead.readCheckNull()) {
         VectorizedBatchUtil.setNullColIsNullValue(colVector, batchIndex);
       } else {
         deserializeRead.readIntervalDayTime(readIntervalDayTimeResults);
-        HiveIntervalDayTime hidt = readIntervalDayTimeResults.getHiveIntervalDayTime();
-        colVector.vector[batchIndex] = DateUtils.getIntervalDayTimeTotalNanos(hidt);
+        HiveIntervalDayTime idt = readIntervalDayTimeResults.getHiveIntervalDayTime();
+        colVector.set(batchIndex, idt);
+        colVector.isNull[batchIndex] = false;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
index 9d241bd..b018979 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
@@ -43,7 +43,7 @@ public class VectorExpressionDescriptor {
   // LongColumnVector -->
   //    INT_FAMILY
   //    DATE
-  //    TIMESTAMP
+  //    INTERVAL_FAMILY
   //
   // DoubleColumnVector -->
   //    FLOAT_FAMILY
@@ -56,6 +56,12 @@ public class VectorExpressionDescriptor {
   //    CHAR
   //    VARCHAR
   //
+  // TimestampColumnVector -->
+  //    TIMESTAMP
+  //
+  // IntervalDayTimeColumnVector -->
+  //    INTERVAL_DAY_TIME
+  //
   public enum ArgumentType {
     NONE                    (0x000),
     INT_FAMILY              (0x001),
@@ -71,9 +77,8 @@ public class VectorExpressionDescriptor {
     INTERVAL_DAY_TIME       (0x200),
     DATETIME_FAMILY         (DATE.value | TIMESTAMP.value),
     INTERVAL_FAMILY         (INTERVAL_YEAR_MONTH.value | INTERVAL_DAY_TIME.value),
-    INT_TIMESTAMP_FAMILY    (INT_FAMILY.value | TIMESTAMP.value),
-    INT_INTERVAL_FAMILY     (INT_FAMILY.value | INTERVAL_FAMILY.value),
-    INT_DATETIME_INTERVAL_FAMILY  (INT_FAMILY.value | DATETIME_FAMILY.value | INTERVAL_FAMILY.value),
+    INT_INTERVAL_YEAR_MONTH     (INT_FAMILY.value | INTERVAL_YEAR_MONTH.value),
+    INT_DATE_INTERVAL_YEAR_MONTH  (INT_FAMILY.value | DATE.value | INTERVAL_YEAR_MONTH.value),
     STRING_DATETIME_FAMILY  (STRING_FAMILY.value | DATETIME_FAMILY.value),
     ALL_FAMILY              (0xFFF);
 
@@ -146,10 +151,13 @@ public class VectorExpressionDescriptor {
     public static String getVectorColumnSimpleName(ArgumentType argType) {
       if (argType == INT_FAMILY ||
           argType == DATE ||
-          argType == TIMESTAMP ||
-          argType == INTERVAL_YEAR_MONTH ||
-          argType == INTERVAL_DAY_TIME) {
+          argType == INTERVAL_YEAR_MONTH
+          ) {
         return "Long";
+      } else if (argType == TIMESTAMP) {
+        return "Timestamp";
+      } else if (argType == INTERVAL_DAY_TIME) {
+        return "IntervalDayTime";
       } else if (argType == FLOAT_FAMILY) {
         return "Double";
       } else if (argType == DECIMAL) {
@@ -341,7 +349,7 @@ public class VectorExpressionDescriptor {
           return ve;
         }
       } catch (Exception ex) {
-        throw new HiveException(ex);
+        throw new HiveException("Could not instantiate VectorExpression class " + ve.getSimpleName(), ex);
       }
     }
     if (LOG.isDebugEnabled()) {
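
The ArgumentType constants above are bit flags, so a family is the bitwise OR of its members and a membership test reduces to a mask check. A hedged, self-contained sketch (the flag values are copied from the hunk; the surrounding class is illustrative):

public class ArgumentTypeMaskExample {
  static final int INT_FAMILY          = 0x001;
  static final int INTERVAL_YEAR_MONTH = 0x100;
  static final int INTERVAL_DAY_TIME   = 0x200;
  static final int INTERVAL_FAMILY     = INTERVAL_YEAR_MONTH | INTERVAL_DAY_TIME;

  public static void main(String[] args) {
    // An expression accepting INTERVAL_FAMILY matches either interval member...
    System.out.println((INTERVAL_FAMILY & INTERVAL_DAY_TIME) != 0);  // true
    // ...but not an int-family argument.
    System.out.println((INTERVAL_FAMILY & INT_FAMILY) != 0);         // false
  }
}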

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index c662634..2a9a179 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -257,10 +257,29 @@ public abstract class VectorExtractRow {
     }
   }
 
-  private class TimestampExtractor extends AbstractLongExtractor {
+  private abstract class AbstractTimestampExtractor extends Extractor {
+
+    protected TimestampColumnVector colVector;
+
+    AbstractTimestampExtractor(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (TimestampColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class TimestampExtractor extends AbstractTimestampExtractor {
+
+    protected Timestamp timestamp;
 
-    private Timestamp timestamp;
-    
     TimestampExtractor(int columnIndex) {
       super(columnIndex);
       object = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector.create(new Timestamp(0));
@@ -271,8 +290,7 @@ public abstract class VectorExtractRow {
     Object extract(int batchIndex) {
       int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
       if (colVector.noNulls || !colVector.isNull[adjustedIndex]) {
-        long value = vector[adjustedIndex];
-        TimestampUtils.assignTimeInNanoSec(value, timestamp);
+        colVector.timestampUpdate(timestamp, adjustedIndex);
         PrimitiveObjectInspectorFactory.writableTimestampObjectInspector.set(object, timestamp);
         return object;
       } else {
@@ -284,7 +302,7 @@ public abstract class VectorExtractRow {
   private class IntervalYearMonthExtractor extends AbstractLongExtractor {
 
     private HiveIntervalYearMonth hiveIntervalYearMonth;
-    
+
     IntervalYearMonthExtractor(int columnIndex) {
       super(columnIndex);
       object = PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector.create(new HiveIntervalYearMonth(0));
@@ -305,10 +323,29 @@ public abstract class VectorExtractRow {
     }
   }
 
-  private class IntervalDayTimeExtractor extends AbstractLongExtractor {
+  private abstract class AbstractIntervalDayTimeExtractor extends Extractor {
+
+    protected IntervalDayTimeColumnVector colVector;
+
+    AbstractIntervalDayTimeExtractor(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class IntervalDayTimeExtractor extends AbstractIntervalDayTimeExtractor {
 
     private HiveIntervalDayTime hiveIntervalDayTime;
-    
+
     IntervalDayTimeExtractor(int columnIndex) {
       super(columnIndex);
       object = PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.create(new HiveIntervalDayTime(0, 0));
@@ -319,8 +356,7 @@ public abstract class VectorExtractRow {
     Object extract(int batchIndex) {
       int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
       if (colVector.noNulls || !colVector.isNull[adjustedIndex]) {
-        long value = vector[adjustedIndex];
-        DateUtils.setIntervalDayTimeTotalNanos(hiveIntervalDayTime, value);
+        hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(adjustedIndex));
         PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.set(object, hiveIntervalDayTime);
         return object;
       } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
index fabac38..50d0452 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.io.IOException;
-import java.util.Arrays;
-
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -121,5 +119,29 @@ public class VectorGroupKeyHelper extends VectorColumnSetInfo {
         outputColumnVector.isNull[outputBatch.size] = true;
       }
     }
+    for (int i = 0; i < timestampIndices.length; ++i) {
+      int keyIndex = timestampIndices[i];
+      TimestampColumnVector inputColumnVector = (TimestampColumnVector) inputBatch.cols[keyIndex];
+      TimestampColumnVector outputColumnVector = (TimestampColumnVector) outputBatch.cols[keyIndex];
+      if (inputColumnVector.noNulls || !inputColumnVector.isNull[0]) {
+        outputColumnVector.setElement(outputBatch.size, 0, inputColumnVector);
+      } else {
+        outputColumnVector.noNulls = false;
+        outputColumnVector.isNull[outputBatch.size] = true;
+      }
+    }
+    for (int i = 0; i < intervalDayTimeIndices.length; ++i) {
+      int keyIndex = intervalDayTimeIndices[i];
+      IntervalDayTimeColumnVector inputColumnVector = (IntervalDayTimeColumnVector) inputBatch.cols[keyIndex];
+      IntervalDayTimeColumnVector outputColumnVector = (IntervalDayTimeColumnVector) outputBatch.cols[keyIndex];
+      if (inputColumnVector.noNulls || !inputColumnVector.isNull[0]) {
+        outputColumnVector.setElement(outputBatch.size, 0, inputColumnVector);
+      } else {
+        outputColumnVector.noNulls = false;
+        outputColumnVector.isNull[outputBatch.size] = true;
+      }
+    }
   }
 }

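The two new loops repeat the copy pattern already used for the long/double/bytes/decimal keys. The same logic as a standalone sketch (the helper name is hypothetical):

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

    public class GroupKeyCopySketch {
      // Copies the timestamp group key at input row 0 into the next output slot,
      // mirroring the loop above.
      static void copyTimestampKey(VectorizedRowBatch inputBatch,
          VectorizedRowBatch outputBatch, int keyIndex) {
        TimestampColumnVector in = (TimestampColumnVector) inputBatch.cols[keyIndex];
        TimestampColumnVector out = (TimestampColumnVector) outputBatch.cols[keyIndex];
        if (in.noNulls || !in.isNull[0]) {
          // setElement copies the whole timestamp entry, not a single encoded long.
          out.setElement(outputBatch.size, 0, in);
        } else {
          out.noNulls = false;
          out.isNull[outputBatch.size] = true;
        }
      }
    }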
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index aff3551..8a101a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Timestamp;
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
@@ -42,6 +44,8 @@ public class VectorHashKeyWrapper extends KeyWrapper {
   private static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
   private static final byte[][] EMPTY_BYTES_ARRAY = new byte[0][];
   private static final HiveDecimalWritable[] EMPTY_DECIMAL_ARRAY = new HiveDecimalWritable[0];
+  private static final Timestamp[] EMPTY_TIMESTAMP_ARRAY = new Timestamp[0];
+  private static final HiveIntervalDayTime[] EMPTY_INTERVAL_DAY_TIME_ARRAY = new HiveIntervalDayTime[0];
 
   private long[] longValues;
   private double[] doubleValues;
@@ -52,14 +56,21 @@ public class VectorHashKeyWrapper extends KeyWrapper {
 
   private HiveDecimalWritable[] decimalValues;
 
+  private Timestamp[] timestampValues;
+
+  private HiveIntervalDayTime[] intervalDayTimeValues;
+
   private boolean[] isNull;
   private int hashcode;
 
   public VectorHashKeyWrapper(int longValuesCount, int doubleValuesCount,
-          int byteValuesCount, int decimalValuesCount) {
+          int byteValuesCount, int decimalValuesCount, int timestampValuesCount,
+          int intervalDayTimeValuesCount) {
     longValues = longValuesCount > 0 ? new long[longValuesCount] : EMPTY_LONG_ARRAY;
     doubleValues = doubleValuesCount > 0 ? new double[doubleValuesCount] : EMPTY_DOUBLE_ARRAY;
     decimalValues = decimalValuesCount > 0 ? new HiveDecimalWritable[decimalValuesCount] : EMPTY_DECIMAL_ARRAY;
+    timestampValues = timestampValuesCount > 0 ? new Timestamp[timestampValuesCount] : EMPTY_TIMESTAMP_ARRAY;
+    intervalDayTimeValues = intervalDayTimeValuesCount > 0 ? new HiveIntervalDayTime[intervalDayTimeValuesCount] : EMPTY_INTERVAL_DAY_TIME_ARRAY;
     for(int i = 0; i < decimalValuesCount; ++i) {
       decimalValues[i] = new HiveDecimalWritable(HiveDecimal.ZERO);
     }
@@ -72,7 +83,14 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       byteStarts = EMPTY_INT_ARRAY;
       byteLengths = EMPTY_INT_ARRAY;
     }
-    isNull = new boolean[longValuesCount + doubleValuesCount + byteValuesCount + decimalValuesCount];
+    for(int i = 0; i < timestampValuesCount; ++i) {
+      timestampValues[i] = new Timestamp(0);
+    }
+    for(int i = 0; i < intervalDayTimeValuesCount; ++i) {
+      intervalDayTimeValues[i] = new HiveIntervalDayTime();
+    }
+    isNull = new boolean[longValuesCount + doubleValuesCount + byteValuesCount +
+                         decimalValuesCount + timestampValuesCount + intervalDayTimeValuesCount];
     hashcode = 0;
   }
 
@@ -94,6 +112,14 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       hashcode ^= decimalValues[i].getHiveDecimal().hashCode();
     }
 
+    for (int i = 0; i < timestampValues.length; i++) {
+      hashcode ^= timestampValues[i].hashCode();
+    }
+
+    for (int i = 0; i < intervalDayTimeValues.length; i++) {
+      hashcode ^= intervalDayTimeValues[i].hashCode();
+    }
+
     // This code, with branches and all, is not executed if there are no string keys
     for (int i = 0; i < byteValues.length; ++i) {
       /*
@@ -131,6 +157,8 @@ public class VectorHashKeyWrapper extends KeyWrapper {
           Arrays.equals(longValues, keyThat.longValues) &&
           Arrays.equals(doubleValues, keyThat.doubleValues) &&
           Arrays.equals(decimalValues,  keyThat.decimalValues) &&
+          Arrays.equals(timestampValues,  keyThat.timestampValues) &&
+          Arrays.equals(intervalDayTimeValues,  keyThat.intervalDayTimeValues) &&
           Arrays.equals(isNull, keyThat.isNull) &&
           byteValues.length == keyThat.byteValues.length &&
           (0 == byteValues.length || bytesEquals(keyThat));
@@ -196,6 +224,23 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       clone.byteStarts = EMPTY_INT_ARRAY;
       clone.byteLengths = EMPTY_INT_ARRAY;
     }
+    if (timestampValues.length > 0) {
+      clone.timestampValues = new Timestamp[timestampValues.length];
+      for(int i = 0; i < timestampValues.length; ++i) {
+        clone.timestampValues[i] = (Timestamp) timestampValues[i].clone();
+      }
+    } else {
+      clone.timestampValues = EMPTY_TIMESTAMP_ARRAY;
+    }
+    if (intervalDayTimeValues.length > 0) {
+      clone.intervalDayTimeValues = new HiveIntervalDayTime[intervalDayTimeValues.length];
+      for(int i = 0; i < intervalDayTimeValues.length; ++i) {
+        clone.intervalDayTimeValues[i] = (HiveIntervalDayTime) intervalDayTimeValues[i].clone();
+      }
+    } else {
+      clone.intervalDayTimeValues = EMPTY_INTERVAL_DAY_TIME_ARRAY;
+    }
+
     clone.hashcode = hashcode;
     assert clone.equals(this);
   }
@@ -256,14 +301,50 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       isNull[longValues.length + doubleValues.length + byteValues.length + index] = true;
   }
 
+  public void assignTimestamp(int index, Timestamp value) {
+    timestampValues[index] = value;
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + index] = false;
+  }
+
+  public void assignTimestamp(int index, TimestampColumnVector colVector, int elementNum) {
+    colVector.timestampUpdate(timestampValues[index], elementNum);
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + index] = false;
+  }
+
+  public void assignNullTimestamp(int index) {
+      isNull[longValues.length + doubleValues.length + byteValues.length +
+             decimalValues.length + index] = true;
+  }
+
+  public void assignIntervalDayTime(int index, HiveIntervalDayTime value) {
+    intervalDayTimeValues[index].set(value);
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + timestampValues.length + index] = false;
+  }
+
+  public void assignIntervalDayTime(int index, IntervalDayTimeColumnVector colVector, int elementNum) {
+    intervalDayTimeValues[index].set(colVector.asScratchIntervalDayTime(elementNum));
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + timestampValues.length + index] = false;
+  }
+
+  public void assignNullIntervalDayTime(int index) {
+      isNull[longValues.length + doubleValues.length + byteValues.length +
+             decimalValues.length + timestampValues.length + index] = true;
+  }
+
   @Override
   public String toString()
   {
-    return String.format("%d[%s] %d[%s] %d[%s] %d[%s]",
+    return String.format("%d[%s] %d[%s] %d[%s] %d[%s] %d[%s] %d[%s]",
         longValues.length, Arrays.toString(longValues),
         doubleValues.length, Arrays.toString(doubleValues),
         byteValues.length, Arrays.toString(byteValues),
-        decimalValues.length, Arrays.toString(decimalValues));
+        decimalValues.length, Arrays.toString(decimalValues),
+        timestampValues.length, Arrays.toString(timestampValues),
+        intervalDayTimeValues.length, Arrays.toString(intervalDayTimeValues));
   }
 
   public boolean getIsLongNull(int i) {
@@ -315,5 +396,23 @@ public class VectorHashKeyWrapper extends KeyWrapper {
   public HiveDecimalWritable getDecimal(int i) {
     return decimalValues[i];
   }
+
+  public boolean getIsTimestampNull(int i) {
+    return isNull[longValues.length + doubleValues.length + byteValues.length +
+                  decimalValues.length + i];
+  }
+
+  public Timestamp getTimestamp(int i) {
+    return timestampValues[i];
+  }
+
+  public boolean getIsIntervalDayTimeNull(int i) {
+    return isNull[longValues.length + doubleValues.length + byteValues.length +
+                  decimalValues.length + timestampValues.length + i];
+  }
+
+  public HiveIntervalDayTime getIntervalDayTime(int i) {
+    return intervalDayTimeValues[i];
+  }
 }
 

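All key null flags share one flat boolean array ordered by type family, so the new timestamp and interval flags slot in after the decimal block. A tiny standalone check of that offset arithmetic (the key counts below are made up):

    public class NullSlotLayoutDemo {
      // Layout: [ longs | doubles | bytes | decimals | timestamps | intervalDayTimes ]
      static int timestampSlot(int longs, int doubles, int bytes, int decimals, int i) {
        return longs + doubles + bytes + decimals + i;
      }

      static int intervalDayTimeSlot(int longs, int doubles, int bytes, int decimals,
          int timestamps, int i) {
        return longs + doubles + bytes + decimals + timestamps + i;
      }

      public static void main(String[] args) {
        // 2 long keys, 1 double, 1 string, 0 decimals, 2 timestamps, 1 interval.
        System.out.println(timestampSlot(2, 1, 1, 0, 0));          // 4: flag of timestamp key 0
        System.out.println(intervalDayTimeSlot(2, 1, 1, 0, 2, 0)); // 6: flag of interval key 0
      }
    }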
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
index 6333222..bfd26ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
@@ -18,13 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import java.util.Arrays;
-
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * Class for handling vectorized hash map key wrappers. It evaluates the key columns in a
@@ -157,27 +155,71 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
       }
     }
     for(int i=0;i<decimalIndices.length; ++i) {
-        int keyIndex = decimalIndices[i];
-        int columnIndex = keyExpressions[keyIndex].getOutputColumn();
-        DecimalColumnVector columnVector = (DecimalColumnVector) batch.cols[columnIndex];
-        if (columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
-          assignDecimalNoNullsNoRepeatingNoSelection(i, batch.size, columnVector);
-        } else if (columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
-          assignDecimalNoNullsNoRepeatingSelection(i, batch.size, columnVector, batch.selected);
-        } else if (columnVector.noNulls && columnVector.isRepeating) {
-          assignDecimalNoNullsRepeating(i, batch.size, columnVector);
-        } else if (!columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
-          assignDecimalNullsNoRepeatingNoSelection(i, batch.size, columnVector);
-        } else if (!columnVector.noNulls && columnVector.isRepeating) {
-          assignDecimalNullsRepeating(i, batch.size, columnVector);
-        } else if (!columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
-          assignDecimalNullsNoRepeatingSelection (i, batch.size, columnVector, batch.selected);
-        } else {
-          throw new HiveException (String.format(
-              "Unimplemented Decimal null/repeat/selected combination %b/%b/%b",
-              columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
-        }
+      int keyIndex = decimalIndices[i];
+      int columnIndex = keyExpressions[keyIndex].getOutputColumn();
+      DecimalColumnVector columnVector = (DecimalColumnVector) batch.cols[columnIndex];
+      if (columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignDecimalNoNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignDecimalNoNullsNoRepeatingSelection(i, batch.size, columnVector, batch.selected);
+      } else if (columnVector.noNulls && columnVector.isRepeating) {
+        assignDecimalNoNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignDecimalNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && columnVector.isRepeating) {
+        assignDecimalNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignDecimalNullsNoRepeatingSelection (i, batch.size, columnVector, batch.selected);
+      } else {
+        throw new HiveException (String.format(
+            "Unimplemented Decimal null/repeat/selected combination %b/%b/%b",
+            columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
+      }
+    }
+    for(int i=0;i<timestampIndices.length; ++i) {
+      int keyIndex = timestampIndices[i];
+      int columnIndex = keyExpressions[keyIndex].getOutputColumn();
+      TimestampColumnVector columnVector = (TimestampColumnVector) batch.cols[columnIndex];
+      if (columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignTimestampNoNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignTimestampNoNullsNoRepeatingSelection(i, batch.size, columnVector, batch.selected);
+      } else if (columnVector.noNulls && columnVector.isRepeating) {
+        assignTimestampNoNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignTimestampNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && columnVector.isRepeating) {
+        assignTimestampNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignTimestampNullsNoRepeatingSelection (i, batch.size, columnVector, batch.selected);
+      } else {
+        throw new HiveException (String.format(
+            "Unimplemented timestamp null/repeat/selected combination %b/%b/%b",
+            columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
+      }
+    }
+    for(int i=0;i<intervalDayTimeIndices.length; ++i) {
+      int keyIndex = intervalDayTimeIndices[i];
+      int columnIndex = keyExpressions[keyIndex].getOutputColumn();
+      IntervalDayTimeColumnVector columnVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+      if (columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignIntervalDayTimeNoNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignIntervalDayTimeNoNullsNoRepeatingSelection(i, batch.size, columnVector, batch.selected);
+      } else if (columnVector.noNulls && columnVector.isRepeating) {
+        assignIntervalDayTimeNoNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignIntervalDayTimeNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && columnVector.isRepeating) {
+        assignIntervalDayTimeNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignIntervalDayTimeNullsNoRepeatingSelection (i, batch.size, columnVector, batch.selected);
+      } else {
+        throw new HiveException (String.format(
+            "Unimplemented intervalDayTime null/repeat/selected combination %b/%b/%b",
+            columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
       }
+    }
     for(int i=0;i<batch.size;++i) {
       vectorHashKeyWrappers[i].setHashKey();
     }
@@ -504,6 +546,154 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
   }
 
   /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, possible nulls, no repeat values, batch selection vector.
+   */
+  private void assignTimestampNullsNoRepeatingSelection(int index, int size,
+      TimestampColumnVector columnVector, int[] selected) {
+    for(int i = 0; i < size; ++i) {
+      int row = selected[i];
+      if (!columnVector.isNull[row]) {
+        vectorHashKeyWrappers[i].assignTimestamp(index, columnVector, row);
+      } else {
+        vectorHashKeyWrappers[i].assignNullTimestamp(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, repeat null values.
+   */
+  private void assignTimestampNullsRepeating(int index, int size,
+      TimestampColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignNullTimestamp(index);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, possible nulls, no repeat values, no selection vector.
+   */
+  private void assignTimestampNullsNoRepeatingNoSelection(int index, int size,
+      TimestampColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      if (!columnVector.isNull[r]) {
+        vectorHashKeyWrappers[r].assignTimestamp(index, columnVector, r);
+      } else {
+        vectorHashKeyWrappers[r].assignNullTimestamp(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, no nulls, repeat values, no selection vector.
+   */
+  private void assignTimestampNoNullsRepeating(int index, int size, TimestampColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignTimestamp(index, columnVector, 0);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, no nulls, no repeat values, batch selection vector.
+   */
+  private void assignTimestampNoNullsNoRepeatingSelection(int index, int size,
+      TimestampColumnVector columnVector, int[] selected) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignTimestamp(index, columnVector, selected[r]);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for Timestamp type, no nulls, no repeat values, no selection vector.
+   */
+  private void assignTimestampNoNullsNoRepeatingNoSelection(int index, int size,
+      TimestampColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignTimestamp(index, columnVector, r);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, possible nulls, no repeat values, batch selection vector.
+   */
+  private void assignIntervalDayTimeNullsNoRepeatingSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector, int[] selected) {
+    for(int i = 0; i < size; ++i) {
+      int row = selected[i];
+      if (!columnVector.isNull[row]) {
+        vectorHashKeyWrappers[i].assignIntervalDayTime(index, columnVector, row);
+      } else {
+        vectorHashKeyWrappers[i].assignNullIntervalDayTime(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, repeat null values.
+   */
+  private void assignIntervalDayTimeNullsRepeating(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignNullIntervalDayTime(index);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, possible nulls, no repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNullsNoRepeatingNoSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      if (!columnVector.isNull[r]) {
+        vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, r);
+      } else {
+        vectorHashKeyWrappers[r].assignNullIntervalDayTime(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsRepeating(int index, int size, IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, 0);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, no repeat values, batch selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsNoRepeatingSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector, int[] selected) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, selected[r]);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, no repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsNoRepeatingNoSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, r);
+    }
+  }
+
+  /**
    * Prepares a VectorHashKeyWrapperBatch to work for a specific set of keys.
    * Computes the fast access lookup indices, preallocates all needed internal arrays.
    * This step is done only once per query, not once per batch. The information computed now
@@ -544,16 +734,19 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForDoubleArrayOfSize(compiledKeyWrapperBatch.doubleIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.stringIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.decimalIndices.length);
+    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.timestampIndices.length);
+    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.intervalDayTimeIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForIntArrayOfSize(compiledKeyWrapperBatch.longIndices.length) * 2;
     compiledKeyWrapperBatch.keysFixedSize +=
         model.lengthForBooleanArrayOfSize(keyExpressions.length);
 
     return compiledKeyWrapperBatch;
   }
-  
+
   public VectorHashKeyWrapper allocateKeyWrapper() {
     return new VectorHashKeyWrapper(longIndices.length, doubleIndices.length,
-        stringIndices.length, decimalIndices.length);
+        stringIndices.length, decimalIndices.length, timestampIndices.length,
+        intervalDayTimeIndices.length);
   }
 
   /**
@@ -581,11 +774,19 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
       return kw.getIsDecimalNull(klh.decimalIndex)? null :
           keyOutputWriter.writeValue(
                 kw.getDecimal(klh.decimalIndex).getHiveDecimal());
-    }
-    else {
+    } else if (klh.timestampIndex >= 0) {
+      return kw.getIsTimestampNull(klh.timestampIndex)? null :
+          keyOutputWriter.writeValue(
+                kw.getTimestamp(klh.timestampIndex));
+    } else if (klh.intervalDayTimeIndex >= 0) {
+      return kw.getIsIntervalDayTimeNull(klh.intervalDayTimeIndex)? null :
+        keyOutputWriter.writeValue(
+              kw.getIntervalDayTime(klh.intervalDayTimeIndex));
+    } else {
       throw new HiveException(String.format(
-          "Internal inconsistent KeyLookupHelper at index [%d]:%d %d %d %d",
-          i, klh.longIndex, klh.doubleIndex, klh.stringIndex, klh.decimalIndex));
+          "Internal inconsistent KeyLookupHelper at index [%d]:%d %d %d %d %d %d",
+          i, klh.longIndex, klh.doubleIndex, klh.stringIndex, klh.decimalIndex,
+          klh.timestampIndex, klh.intervalDayTimeIndex));
     }
   }
 

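Each key type runs the same six-way dispatch on (noNulls, isRepeating, selectedInUse). The decision table in isolation, with the specialized assign helpers reduced to labels:

    import org.apache.hadoop.hive.ql.metadata.HiveException;

    public class AssignDispatchSketch {
      // Three flags give eight combinations; the two repeating branches ignore
      // selectedInUse, so six cases cover all eight and the throw is only a guard.
      static String pick(boolean noNulls, boolean isRepeating, boolean selectedInUse)
          throws HiveException {
        if (noNulls && !isRepeating && !selectedInUse) {
          return "NoNullsNoRepeatingNoSelection";
        } else if (noNulls && !isRepeating && selectedInUse) {
          return "NoNullsNoRepeatingSelection";
        } else if (noNulls && isRepeating) {
          return "NoNullsRepeating";
        } else if (!noNulls && !isRepeating && !selectedInUse) {
          return "NullsNoRepeatingNoSelection";
        } else if (!noNulls && isRepeating) {
          return "NullsRepeating";
        } else if (!noNulls && !isRepeating && selectedInUse) {
          return "NullsNoRepeatingSelection";
        } else {
          throw new HiveException("Unimplemented null/repeat/selected combination");
        }
      }
    }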
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index 5586944..47acbe4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -254,7 +255,7 @@ public class VectorSerializeRow {
     }
   }
 
-  private class TimestampWriter extends AbstractLongWriter {
+  private class TimestampWriter extends Writer {
 
     Timestamp scratchTimestamp;
 
@@ -265,11 +266,11 @@ public class VectorSerializeRow {
 
     @Override
     boolean apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
+      TimestampColumnVector colVector = (TimestampColumnVector) batch.cols[columnIndex];
 
       if (colVector.isRepeating) {
         if (colVector.noNulls || !colVector.isNull[0]) {
-          TimestampUtils.assignTimeInNanoSec(colVector.vector[0], scratchTimestamp);
+          colVector.timestampUpdate(scratchTimestamp, 0);
           serializeWrite.writeTimestamp(scratchTimestamp);
           return true;
         } else {
@@ -278,7 +279,7 @@ public class VectorSerializeRow {
         }
       } else {
         if (colVector.noNulls || !colVector.isNull[batchIndex]) {
-          TimestampUtils.assignTimeInNanoSec(colVector.vector[batchIndex], scratchTimestamp);
+          colVector.timestampUpdate(scratchTimestamp, batchIndex);
           serializeWrite.writeTimestamp(scratchTimestamp);
           return true;
         } else {
@@ -319,19 +320,23 @@ public class VectorSerializeRow {
     }
   }
 
-  private class IntervalDayTimeWriter extends AbstractLongWriter {
+  private class IntervalDayTimeWriter extends Writer {
+
+    private HiveIntervalDayTime hiveIntervalDayTime;
 
     IntervalDayTimeWriter(int columnIndex) {
       super(columnIndex);
+      hiveIntervalDayTime = new HiveIntervalDayTime();
     }
 
     @Override
     boolean apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
+      IntervalDayTimeColumnVector colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
 
       if (colVector.isRepeating) {
         if (colVector.noNulls || !colVector.isNull[0]) {
-          serializeWrite.writeHiveIntervalDayTime(colVector.vector[0]);
+          hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(0));
+          serializeWrite.writeHiveIntervalDayTime(hiveIntervalDayTime);
           return true;
         } else {
           serializeWrite.writeNull();
@@ -339,7 +344,8 @@ public class VectorSerializeRow {
         }
       } else {
         if (colVector.noNulls || !colVector.isNull[batchIndex]) {
-          serializeWrite.writeHiveIntervalDayTime(colVector.vector[batchIndex]);
+          hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(batchIndex));
+          serializeWrite.writeHiveIntervalDayTime(hiveIntervalDayTime);
           return true;
         } else {
           serializeWrite.writeNull();

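The serializer now reads timestamps through timestampUpdate(), which refreshes a reusable java.sql.Timestamp in place, keeping the per-row path allocation-free. The read idiom as a standalone sketch (class and method names are illustrative):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    public class TimestampReadSketch {
      // One scratch object per writer; overwritten on every row.
      private final Timestamp scratchTimestamp = new Timestamp(0);

      Timestamp read(TimestampColumnVector colVector, int batchIndex) {
        int idx = colVector.isRepeating ? 0 : batchIndex;
        // Overwrites scratchTimestamp with the value at row idx.
        colVector.timestampUpdate(scratchTimestamp, idx);
        return scratchTimestamp;
      }
    }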
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
index 1363004..6b5740e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
@@ -25,7 +25,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -170,8 +169,8 @@ public class VectorSerializeRowNoNulls {
 
     @Override
     void apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
-      TimestampUtils.assignTimeInNanoSec(colVector.vector[colVector.isRepeating ? 0 : batchIndex], scratchTimestamp);
+      TimestampColumnVector colVector = (TimestampColumnVector) batch.cols[columnIndex];
+      colVector.timestampUpdate(scratchTimestamp, colVector.isRepeating ? 0 : batchIndex);
       serializeWrite.writeTimestamp(scratchTimestamp);
     }
   }
@@ -191,14 +190,18 @@ public class VectorSerializeRowNoNulls {
 
   private class IntervalDayTimeWriter extends AbstractLongWriter {
 
+    private HiveIntervalDayTime hiveIntervalDayTime;
+
     IntervalDayTimeWriter(int columnIndex) {
       super(columnIndex);
+      hiveIntervalDayTime = new HiveIntervalDayTime();
     }
 
     @Override
     void apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      LongColumnVector colVector = (LongColumnVector) batch.cols[columnIndex];
-      serializeWrite.writeHiveIntervalDayTime(colVector.vector[colVector.isRepeating ? 0 : batchIndex]);
+      IntervalDayTimeColumnVector colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+      hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(colVector.isRepeating ? 0 : batchIndex));
+      serializeWrite.writeHiveIntervalDayTime(hiveIntervalDayTime);
     }
   }
 


[15/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
new file mode 100644
index 0000000..ddde913
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template IntervalYearMonthScalarArithmeticDateColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveIntervalYearMonth value;
+  private int outputColumn;
+  private Date scratchDate2;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new HiveIntervalYearMonth((int) value);
+    this.outputColumn = outputColumn;
+    scratchDate2 = new Date(0);
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type date.
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type Date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.<OperatorMethod>(
+          value, scratchDate2, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

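The template keeps DATE values as epoch days in a LongColumnVector and detours through java.sql.Date only for the interval arithmetic itself. The conversion pair in isolation (the day count is made up):

    import java.sql.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateDaysSketch {
      public static void main(String[] args) {
        long days = 17000;                                        // made-up epoch-day value
        Date scratch = new Date(0);
        scratch.setTime(DateWritable.daysToMillis((int) days));   // epoch days -> millis
        // ... dtm.<OperatorMethod>(value, scratch, outputDate) would run here ...
        long roundTripped = DateWritable.dateToDays(scratch);     // Date -> epoch days
        System.out.println(roundTripped == days);                 // true
      }
    }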
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..cbb7021
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticTimestampColumn.txt
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template IntervalYearMonthScalarArithmeticTimestampColumn.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveIntervalYearMonth value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new HiveIntervalYearMonth((int) value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type timestamp.
+    TimestampColumnVector inputColVector2 = (TimestampColumnVector) batch.cols[colNum];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      dtm.<OperatorMethod>(
+          value, inputColVector2.asScratchTimestamp(0), outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

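Results go through the output vector's shared scratch Timestamp: compute into getScratchTimestamp(), then commit to a row with setFromScratchTimestamp(). A sketch of that idiom, assuming <OperatorMethod> expands to add:

    import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.util.DateTimeMath;

    public class ScratchTimestampWriteSketch {
      private final DateTimeMath dtm = new DateTimeMath();

      void addIntervalToRow(HiveIntervalYearMonth value,
          TimestampColumnVector in, TimestampColumnVector out, int i) {
        // Compute into the output vector's scratch Timestamp...
        dtm.add(value, in.asScratchTimestamp(i), out.getScratchTimestamp());
        // ...then copy the scratch value into row i of the output vector.
        out.setFromScratchTimestamp(i);
      }
    }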
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt
new file mode 100644
index 0000000..9ccfaac
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampColumn.txt
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template LongDoubleColumnCompareTimestampColumn.txt, which covers binary
+ * comparison expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    TimestampColumnVector inputColVector2 = (TimestampColumnVector) batch.cols[colNum2];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <OperandType>[] vector1 = inputColVector1.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the comparison is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      outputVector[0] = vector1[0] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(0) ? 1 : 0;
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = vector1[0] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = vector1[0] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <OperandType> value2 = inputColVector2.<GetTimestampLongDoubleMethod>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = vector1[i] <OperatorSymbol> value2 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = vector1[i] <OperatorSymbol> value2 ? 1 : 0;
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = vector1[i] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = vector1[i] <OperatorSymbol> inputColVector2.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

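The isRepeating rule above is worth calling out: the output repeats when both inputs repeat, or when either input repeats as a single NULL (which forces every output row to NULL). Restated as a standalone predicate:

    import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;

    public class RepeatingRuleSketch {
      static boolean outputIsRepeating(ColumnVector in1, ColumnVector in2) {
        return in1.isRepeating && in2.isRepeating
            || in1.isRepeating && !in1.noNulls && in1.isNull[0]
            || in2.isRepeating && !in2.noNulls && in2.isNull[0];
      }
    }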
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
new file mode 100644
index 0000000..c7d8c65
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template LongDoubleColumnCompareTimestampScalar.txt, which covers binary comparison
+ * expressions between a column and a scalar. The boolean output is stored in a
+ * separate boolean column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType> value;
+  private int outputColumn;
+
+  public <ClassName>(int colNum, Timestamp value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = TimestampColumnVector.<GetTimestampLongDoubleMethod>(value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector1.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    <OperandType>[] vector1 = inputColVector1.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        outputVector[0] = vector1[0] <OperatorSymbol> value ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = vector1[i] <OperatorSymbol> value ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = vector1[i] <OperatorSymbol> value ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector1.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = vector1[0] <OperatorSymbol> value ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            outputVector[i] = vector1[i] <OperatorSymbol> value ? 1 : 0;
+            outNulls[i] = false;
+          } else {
+            //comparison with null is null
+            outNulls[i] = true;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = vector1[i] <OperatorSymbol> value ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
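
For context, the generated comparison classes above all share the same
selected-versus-dense loop split. A minimal standalone sketch of that
pattern (hypothetical names, plain arrays, no Hive dependencies):

    // Illustration only: the selected-vs-dense split used by the
    // generated comparison expressions.
    public class CompareLoopSketch {
      static void demoCompare(double[] vector, double scalar, long[] out,
                              int[] sel, int n, boolean selectedInUse) {
        if (selectedInUse) {
          // Only the rows listed in sel[0..n) are live.
          for (int j = 0; j != n; j++) {
            int i = sel[j];
            out[i] = vector[i] > scalar ? 1 : 0;
          }
        } else {
          // Rows 0..n are all live; iterate densely for speed.
          for (int i = 0; i != n; i++) {
            out[i] = vector[i] > scalar ? 1 : 0;
          }
        }
      }

      public static void main(String[] args) {
        double[] v = {1.5, 2.5, 3.5, 4.5};
        long[] out = new long[4];
        demoCompare(v, 2.0, out, new int[]{0, 2}, 2, true);
        System.out.println(out[0] + " " + out[2]); // prints: 0 1
      }
    }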

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleScalarCompareTimestampColumn.txt
new file mode 100644
index 0000000..d47bc10
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleScalarCompareTimestampColumn.txt
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template LongDoubleScalarCompareTimestampColumn.txt, which covers comparison
+ * expressions between a long/double scalar and a column. The boolean output is stored in a
+ * separate boolean column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType> value;
+  private int outputColumn;
+
+  public <ClassName>(<OperandType> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector.noNulls;
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        outputVector[0] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(0) ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(0) ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            outputVector[i] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+            outNulls[i] = false;
+          } else {
+            //comparison with null is null
+            outNulls[i] = true;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = value <OperatorSymbol> inputColVector.<GetTimestampLongDoubleMethod>(i) ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
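
The null handling in these compare templates follows SQL three-valued
logic: comparing NULL to anything yields NULL, so the input null flags
are copied to the output and values are computed only for non-null rows.
A standalone sketch of the "has nulls, no selection" branch (hypothetical
names, not Hive API):

    public class NullCompareSketch {
      static void compareWithNulls(double scalar, double[] vector, boolean[] isNull,
                                   long[] out, boolean[] outIsNull, int n) {
        System.arraycopy(isNull, 0, outIsNull, 0, n); // NULL in -> NULL out
        for (int i = 0; i != n; i++) {
          if (!isNull[i]) {
            out[i] = scalar < vector[i] ? 1 : 0;
          }
        }
      }

      public static void main(String[] args) {
        double[] v = {10.0, 0.0, 30.0};
        boolean[] nulls = {false, true, false};
        long[] out = new long[3];
        boolean[] outNulls = new boolean[3];
        compareWithNulls(20.0, v, nulls, out, outNulls, 3);
        System.out.println(out[0] + " " + outNulls[1] + " " + out[2]); // 0 true 1
      }
    }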

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/ScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/ScalarCompareTimestampColumn.txt
deleted file mode 100644
index 7867610..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/ScalarCompareTimestampColumn.txt
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.udf.UDFToString;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import java.sql.Timestamp;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.io.LongWritable;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * Generated from template ScalarCompareTimestampColumn.txt, which covers comparison 
- * expressions between a timestamp column and a long or double scalar. The boolean output
- * is stored in a separate boolean column.
- * Note: For timestamp and long or double we implicitly interpret the long as the number
- * of seconds or double as seconds and fraction since the epoch.
- */
-public class <ClassName> extends <BaseClassName> {
-
-  public <ClassName>(<OperandType> value, int colNum, int outputColumn) {
-    super(TimestampUtils.<TimestampScalarConversion>(value), colNum, outputColumn);
-  }
-
-  public <ClassName>() {
-    super();
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
new file mode 100644
index 0000000..27e083d
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template TimestampColumnArithmeticDateColumn.txt, which covers binary arithmetic
+ * expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private Timestamp scratchTimestamp2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchTimestamp2 = new Timestamp(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type <OperandType1>.
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+
+    // Input #2 is type date (days).  For the math we convert it to a timestamp.
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratch<CamelOperandType1>(0), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+    } else if (inputColVector1.isRepeating) {
+      <HiveOperandType1> value1 = inputColVector1.asScratch<CamelOperandType1>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
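
The template above converts each date value (epoch days) to a timestamp
before doing the math, reusing one scratch Timestamp per batch to avoid
per-row allocation; the scalar variant in the next file does the same
conversion once in its constructor instead. A UTC-only sketch of the
conversion (illustrative; the real DateWritable.daysToMillis may also
handle time-zone adjustment):

    import java.sql.Timestamp;

    public class DaysToTimestampSketch {
      static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

      static Timestamp fromEpochDays(int days, Timestamp scratch) {
        scratch.setTime(days * MILLIS_PER_DAY); // reuse one object per batch
        return scratch;
      }

      public static void main(String[] args) {
        Timestamp scratch = new Timestamp(0);
        System.out.println(fromEpochDays(0, scratch));     // epoch day 0
        System.out.println(fromEpochDays(16000, scratch)); // epoch day 16000
      }
    }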

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
new file mode 100644
index 0000000..8b91a4a
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template TimestampColumnArithmeticDateScalar.txt, which covers binary arithmetic
+ * expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type <OperandType1>.
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratch<CamelOperandType1>(0), value, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt
new file mode 100644
index 0000000..4ac2174
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthColumn.txt
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
+ * expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type Timestamp.
+    TimestampColumnVector inputColVector1 = (TimestampColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type Interval_Year_Month (months).
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratchTimestamp(0), scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+    } else if (inputColVector1.isRepeating) {
+      Timestamp value1 = inputColVector1.asScratchTimestamp(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value1, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value1, scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), scratchIntervalYearMonth2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesTimestamp(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
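
Year-month interval arithmetic cannot be done on raw epoch values because
month lengths vary; that is why the template funnels every row through
DateTimeMath rather than adding a fixed offset. A rough standalone sketch
of month arithmetic using java.util.Calendar (illustrative only; Hive's
DateTimeMath encapsulates the actual logic):

    import java.sql.Timestamp;
    import java.util.Calendar;

    public class AddMonthsSketch {
      static Timestamp addMonths(Timestamp ts, int months) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ts.getTime());
        cal.add(Calendar.MONTH, months);   // clamps day-of-month as needed
        Timestamp result = new Timestamp(cal.getTimeInMillis());
        result.setNanos(ts.getNanos());    // preserve sub-millisecond digits
        return result;
      }

      public static void main(String[] args) {
        Timestamp t = Timestamp.valueOf("2016-01-31 10:00:00");
        System.out.println(addMonths(t, 1)); // 2016-02-29 10:00:00.0
      }
    }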

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthScalar.txt
new file mode 100644
index 0000000..9382aca
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticIntervalYearMonthScalar.txt
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
+ * expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private HiveIntervalYearMonth value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new HiveIntervalYearMonth((int) value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type Timestamp.
+    TimestampColumnVector inputColVector1 = (TimestampColumnVector) batch.cols[colNum];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratchTimestamp(0), value, outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratchTimestamp(i), value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..5eaa450
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampColumn.txt
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampColumnArithmeticTimestampColumn.txt, which covers binary arithmetic
+ * expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type <OperandType1>.
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratch<CamelOperandType1>(0), inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+    } else if (inputColVector1.isRepeating) {
+      <HiveOperandType1> value1 = inputColVector1.asScratch<CamelOperandType1>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <HiveOperandType2> value2 = inputColVector2.asScratch<CamelOperandType2>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
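
The output isRepeating rule near the top of evaluate() is worth spelling
out: the result can be marked repeating when both inputs repeat, or when
either input is a repeating NULL (every output row becomes NULL anyway).
A standalone restatement of that predicate (hypothetical names):

    public class RepeatingRuleSketch {
      static boolean outputRepeats(boolean rep1, boolean noNulls1, boolean null1,
                                   boolean rep2, boolean noNulls2, boolean null2) {
        return rep1 && rep2
            || rep1 && !noNulls1 && null1
            || rep2 && !noNulls2 && null2;
      }

      public static void main(String[] args) {
        System.out.println(outputRepeats(true, true, false, true, true, false));   // true
        System.out.println(outputRepeats(true, false, true, false, true, false));  // true
        System.out.println(outputRepeats(false, true, false, false, true, false)); // false
      }
    }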

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
new file mode 100644
index 0000000..c6c872f
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template TimestampColumnArithmeticTimestampScalar.txt, which covers binary arithmetic
+ * expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType2> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, <HiveOperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type <OperandType1>.
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      dtm.<OperatorMethod>(
+          inputColVector1.asScratch<CamelOperandType1>(0), value, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              inputColVector1.asScratch<CamelOperandType1>(i), value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
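
When the input column repeats, the column-scalar templates above compute
row 0 once and unconditionally copy its null flag, trading one redundant
assignment for simpler branch structure. A tiny sketch of that shortcut
(hypothetical names, long addition standing in for the timestamp math):

    public class RepeatingScalarSketch {
      static void addScalarRepeating(long[] in, boolean[] inNull,
                                     long[] out, boolean[] outNull, long scalar) {
        out[0] = in[0] + scalar; // one computation covers every row
        outNull[0] = inNull[0];  // copied even when there are no nulls
      }

      public static void main(String[] args) {
        long[] in = {7};         // repeating vector: only row 0 is real
        boolean[] inNull = {false};
        long[] out = new long[1];
        boolean[] outNull = new boolean[1];
        addScalarRepeating(in, inNull, out, outNull, 35);
        System.out.println(out[0] + " " + outNull[0]); // 42 false
      }
    }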

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleColumn.txt
new file mode 100644
index 0000000..0fc402d
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleColumn.txt
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template TimestampColumnCompareLongDoubleColumn.txt, which covers comparison
+ * expressions between a timestamp column and a long/double column. The boolean output is
+ * stored in a separate boolean column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector1 = (TimestampColumnVector) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <OperandType>[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      outputVector[0] = inputColVector1.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> vector2[0] ? 1 : 0;
+    } else if (inputColVector1.isRepeating) {
+      <OperandType> value1 = inputColVector1.<GetTimestampLongDoubleMethod>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = value1 <OperatorSymbol> vector2[i] ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = value1 <OperatorSymbol> vector2[i] ? 1 : 0;
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <OperandType> value2 = vector2[0];
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value2 ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value2 ? 1 : 0;
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> vector2[i] ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector1.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> vector2[i] ? 1 : 0;
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
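
For context: the template above deliberately computes a result for every row, including null rows, and only afterwards lets NullUtil.setNullDataEntriesLong overwrite the null slots with a safe value. A minimal standalone sketch of that convention (SimpleLongColumn is a hypothetical stand-in, not a Hive class):

final class NullPatchSketch {
  static final class SimpleLongColumn {
    long[] vector;
    boolean[] isNull;
    boolean noNulls;
  }

  static void setNullDataEntries(SimpleLongColumn col, int n) {
    // After the branch-free inner loop, slots flagged null may hold garbage.
    // Force them to 1 so a later expression like col2 / (col1 - 1) cannot
    // trip a divide-by-zero on a value that is logically NULL anyway.
    if (!col.noNulls) {
      for (int i = 0; i < n; i++) {
        if (col.isNull[i]) {
          col.vector[i] = 1;
        }
      }
    }
  }
}

Keeping the inner loop branch-free is what makes the vectorized path fast; correctness for null rows is restored in a single pass at the end.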

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleScalar.txt
new file mode 100644
index 0000000..e0ae206
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareLongDoubleScalar.txt
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template TimestampColumnCompareLongDoubleScalar.txt, which covers comparison
+ * expressions between a Timestamp column and a long/double scalar. The boolean output is stored
+ * in a separate boolean column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType> value;
+  private int outputColumn;
+
+  public <ClassName>(int colNum, <OperandType> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    boolean[] outNulls = outputColVector.isNull;
+    int n = batch.size;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    outputColVector.noNulls = inputColVector.noNulls;
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        outputVector[0] = inputColVector.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> value ? 1 : 0;
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value ? 1 : 0;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value ? 1 : 0;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          outputVector[0] = inputColVector.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> value ? 1 : 0;
+          outNulls[0] = false;
+        } else {
+          outNulls[0] = true;
+        }
+        outputColVector.isRepeating = true;
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value ? 1 : 0;
+            outNulls[i] = false;
+          } else {
+            //comparison with null is null
+            outNulls[i] = true;
+          }
+        }
+      } else {
+        System.arraycopy(nullPos, 0, outNulls, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            outputVector[i] = inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value ? 1 : 0;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
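
For context: both templates follow the standard vectorized-expression skeleton of specializing on (noNulls x isRepeating x selectedInUse) so the hot loops carry no per-row conditionals. A condensed sketch of that skeleton for a generic unary function (hypothetical and simplified, not Hive code):

import java.util.function.LongUnaryOperator;

final class BranchSkeletonSketch {
  static void evaluate(long[] in, long[] out, boolean noNulls,
      boolean isRepeating, int[] sel, boolean selectedInUse, int n,
      LongUnaryOperator f) {
    if (n == 0) {
      return;
    }
    if (isRepeating) {
      // Only slot 0 is meaningful for a repeating column.
      out[0] = f.applyAsLong(in[0]);
      return;
    }
    if (noNulls) {
      if (selectedInUse) {
        for (int j = 0; j < n; j++) {
          int i = sel[j];
          out[i] = f.applyAsLong(in[i]);
        }
      } else {
        for (int i = 0; i < n; i++) {
          out[i] = f.applyAsLong(in[i]);
        }
      }
    } else {
      // The null-aware branch mirrors the two loops above, additionally
      // copying isNull[i] and skipping f for null rows.
    }
  }
}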


[09/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthTimestamp.java
new file mode 100644
index 0000000..e966636
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFMonthTimestamp.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+
+/**
+ * Returns the month value (1-12).
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFMonthTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFMonthTimestamp(int colNum, int outputColumn) {
+    super(Calendar.MONTH, colNum, outputColumn);
+  }
+
+  public VectorUDFMonthTimestamp() {
+    super();
+  }
+
+  @Override
+  protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) {
+    /* january is 0 */
+    return 1 + super.getTimestampField(timestampColVector, elementNum);
+  }
+}
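
For context: the "1 +" adjustment exists because java.util.Calendar numbers months from zero. A quick standalone check (not part of the patch):

import java.util.Calendar;

final class MonthOffsetSketch {
  public static void main(String[] args) {
    Calendar c = Calendar.getInstance();
    c.set(2016, Calendar.JANUARY, 15);
    // Calendar.MONTH is 0-based: JANUARY == 0 ...
    System.out.println(c.get(Calendar.MONTH));      // 0
    // ... so the UDF shifts it up by one to match SQL month() semantics.
    System.out.println(1 + c.get(Calendar.MONTH));  // 1
  }
}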

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondDate.java
new file mode 100644
index 0000000..fbae390
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondDate.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get seconds.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFSecondDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFSecondDate(int colNum, int outputColumn) {
+    super(Calendar.SECOND, colNum, outputColumn);
+  }
+
+  public VectorUDFSecondDate() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondLong.java
deleted file mode 100644
index 41655ec..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondLong.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Expression to get seconds.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFSecondLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFSecondLong(int colNum, int outputColumn) {
-    super(Calendar.SECOND, colNum, outputColumn);
-  }
-
-  public VectorUDFSecondLong() {
-    super();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondTimestamp.java
new file mode 100644
index 0000000..97842f0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFSecondTimestamp.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get seconds.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFSecondTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFSecondTimestamp(int colNum, int outputColumn) {
+    super(Calendar.SECOND, colNum, outputColumn);
+  }
+
+  public VectorUDFSecondTimestamp() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
new file mode 100644
index 0000000..0a3a87a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+import com.google.common.base.Preconditions;
+
+
+/**
+ * Abstract class to return various calendar fields from a Date.
+ */
+public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  protected int colNum;
+  protected int outputColumn;
+  protected int field;
+  protected transient final Calendar calendar = Calendar.getInstance();
+
+  public VectorUDFTimestampFieldDate(int field, int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+    this.field = field;
+  }
+
+  public VectorUDFTimestampFieldDate() {
+    super();
+  }
+
+  protected long getDateField(long days) {
+    calendar.setTimeInMillis(DateWritable.daysToMillis((int) days));
+    return calendar.get(field);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    Preconditions.checkState(inputTypes[0] == VectorExpression.Type.DATE);
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn];
+    ColumnVector inputColVec = batch.cols[this.colNum];
+
+    /* the iteration logic below is identical across the field extractors */
+    final int n = inputColVec.isRepeating ? 1 : batch.size;
+    int[] sel = batch.selected;
+    final boolean selectedInUse = !inputColVec.isRepeating && batch.selectedInUse;
+
+    if(batch.size == 0) {
+      /* n != batch.size when isRepeating */
+      return;
+    }
+
+    /* true for all algebraic UDFs with no state */
+    outV.isRepeating = inputColVec.isRepeating;
+
+    LongColumnVector longColVector = (LongColumnVector) inputColVec;
+
+    if (inputColVec.noNulls) {
+      outV.noNulls = true;
+      if (selectedInUse) {
+        for(int j=0; j < n; j++) {
+          int i = sel[j];
+          outV.vector[i] = getDateField(longColVector.vector[i]);
+        }
+      } else {
+        for(int i = 0; i < n; i++) {
+          outV.vector[i] = getDateField(longColVector.vector[i]);
+        }
+      }
+    } else {
+      // Handle case with nulls. Don't do function if the value is null, to save time,
+      // because calling the function can be expensive.
+      outV.noNulls = false;
+      if (selectedInUse) {
+        for(int j=0; j < n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inputColVec.isNull[i];
+          if (!inputColVec.isNull[i]) {
+            outV.vector[i] = getDateField(longColVector.vector[i]);
+          }
+        }
+      } else {
+        for(int i = 0; i < n; i++) {
+          outV.isNull[i] = inputColVec.isNull[i];
+          if (!inputColVec.isNull[i]) {
+            outV.vector[i] = getDateField(longColVector.vector[i]);
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return this.outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public int getField() {
+    return field;
+  }
+
+  public void setField(int field) {
+    this.field = field;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.DATE)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
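
For context: getDateField() converts a days-since-epoch value to milliseconds before asking the Calendar for a field. A rough standalone approximation, assuming UTC (the real path goes through DateWritable.daysToMillis, which also compensates for the local time zone):

import java.util.Calendar;
import java.util.TimeZone;

final class DateFieldSketch {
  // Rough equivalent of getDateField() above for UTC days-since-epoch.
  static long getDateField(long days, int field) {
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    calendar.setTimeInMillis(days * 86400000L); // 24 * 60 * 60 * 1000
    return calendar.get(field);
  }

  public static void main(String[] args) {
    // Day 0 is 1970-01-01, so the year field is 1970.
    System.out.println(getDateField(0, Calendar.YEAR)); // 1970
  }
}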

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldLong.java
deleted file mode 100644
index 1cda0a9..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldLong.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.sql.Timestamp;
-import java.util.Calendar;
-
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-/**
- * Abstract class to return various fields from a Timestamp or Date.
- */
-public abstract class VectorUDFTimestampFieldLong extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  protected int colNum;
-  protected int outputColumn;
-  protected int field;
-  protected transient final Calendar calendar = Calendar.getInstance();
-  protected transient final Timestamp ts = new Timestamp(0);
-
-  public VectorUDFTimestampFieldLong(int field, int colNum, int outputColumn) {
-    this();
-    this.colNum = colNum;
-    this.outputColumn = outputColumn;
-    this.field = field;
-  }
-
-  public VectorUDFTimestampFieldLong() {
-    super();
-  }
-
-  protected final Timestamp getTimestamp(long nanos) {
-    /*
-     * new Timestamp() stores the millisecond precision values in the nanos field.
-     * If you wanted to store 200ms it will result in nanos being set to 200*1000*1000.
-     * When you call setNanos(0), because there are no sub-ms times, it will set it to 0,
-     * ending up with a Timestamp which refers to 0ms by accident.
-     * CAVEAT: never use a sub-second value in new Timestamp() args, just use setNanos to set it.
-     */
-    long ms = (nanos / (1000 * 1000 * 1000)) * 1000;
-    /* the milliseconds should be kept in nanos */
-    long ns = nanos % (1000*1000*1000);
-    if (ns < 0) {
-      /*
-       * Due to the way java.sql.Timestamp stores sub-second values, it throws an exception
-       * if nano seconds are negative. The timestamp implementation handles this by using
-       * negative milliseconds and adjusting the nano seconds up by the same to be positive.
-       * Read Timestamp.java:setTime() implementation for this code.
-       */
-      ms -= 1000;
-      ns += 1000*1000*1000;
-    }
-    ts.setTime(ms);
-    ts.setNanos((int) ns);
-    return ts;
-  }
-
-  protected long getTimestampField(long time) {
-    calendar.setTime(getTimestamp(time));
-    return calendar.get(field);
-  }
-
-  protected long getDateField(long days) {
-    calendar.setTimeInMillis(DateWritable.daysToMillis((int) days));
-    return calendar.get(field);
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-        super.evaluateChildren(batch);
-      }
-
-    LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn];
-    LongColumnVector inputCol = (LongColumnVector)batch.cols[this.colNum];
-    /* every line below this is identical for evaluateLong & evaluateString */
-    final int n = inputCol.isRepeating ? 1 : batch.size;
-    int[] sel = batch.selected;
-
-    if(batch.size == 0) {
-      /* n != batch.size when isRepeating */
-      return;
-    }
-
-    /* true for all algebraic UDFs with no state */
-    outV.isRepeating = inputCol.isRepeating;
-
-    switch (inputTypes[0]) {
-      case TIMESTAMP:
-        if (inputCol.noNulls) {
-          outV.noNulls = true;
-          if (batch.selectedInUse) {
-            for(int j=0; j < n; j++) {
-              int i = sel[j];
-              outV.vector[i] = getTimestampField(inputCol.vector[i]);
-            }
-          } else {
-            for(int i = 0; i < n; i++) {
-              outV.vector[i] = getTimestampField(inputCol.vector[i]);
-            }
-          }
-        } else {
-          // Handle case with nulls. Don't do function if the value is null, to save time,
-          // because calling the function can be expensive.
-          outV.noNulls = false;
-          if (batch.selectedInUse) {
-            for(int j=0; j < n; j++) {
-              int i = sel[j];
-              outV.isNull[i] = inputCol.isNull[i];
-              if (!inputCol.isNull[i]) {
-                outV.vector[i] = getTimestampField(inputCol.vector[i]);
-              }
-            }
-          } else {
-            for(int i = 0; i < n; i++) {
-              outV.isNull[i] = inputCol.isNull[i];
-              if (!inputCol.isNull[i]) {
-                outV.vector[i] = getTimestampField(inputCol.vector[i]);
-              }
-            }
-          }
-        }
-        break;
-
-      case DATE:
-        if (inputCol.noNulls) {
-          outV.noNulls = true;
-          if (batch.selectedInUse) {
-            for(int j=0; j < n; j++) {
-              int i = sel[j];
-              outV.vector[i] = getDateField(inputCol.vector[i]);
-            }
-          } else {
-            for(int i = 0; i < n; i++) {
-              outV.vector[i] = getDateField(inputCol.vector[i]);
-            }
-          }
-        } else {
-          // Handle case with nulls. Don't do function if the value is null, to save time,
-          // because calling the function can be expensive.
-          outV.noNulls = false;
-          if (batch.selectedInUse) {
-            for(int j=0; j < n; j++) {
-              int i = sel[j];
-              outV.isNull[i] = inputCol.isNull[i];
-              if (!inputCol.isNull[i]) {
-                outV.vector[i] = getDateField(inputCol.vector[i]);
-              }
-            }
-          } else {
-            for(int i = 0; i < n; i++) {
-              outV.isNull[i] = inputCol.isNull[i];
-              if (!inputCol.isNull[i]) {
-                outV.vector[i] = getDateField(inputCol.vector[i]);
-              }
-            }
-          }
-        }
-        break;
-      default:
-        throw new Error("Unsupported input type " + inputTypes[0].name());
-    }
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return this.outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "long";
-  }
-
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public int getField() {
-    return field;
-  }
-
-  public void setField(int field) {
-    this.field = field;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
-    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.DATETIME_FAMILY)
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN);
-    return b.build();
-  }
-}
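
The getTimestamp() comment in the deleted class above documents a real java.sql.Timestamp pitfall that outlives this refactor: the constructor folds millisecond precision into the nanos field, so setNanos(0) silently erases the sub-second part. A standalone demonstration (not Hive code):

import java.sql.Timestamp;

final class TimestampNanosSketch {
  public static void main(String[] args) {
    // 200 ms past the epoch: the constructor stores the 200 ms as nanos.
    Timestamp ts = new Timestamp(200);
    System.out.println(ts.getNanos()); // 200000000
    // Zeroing nanos also discards the 200 ms; the instant becomes 0 ms.
    ts.setNanos(0);
    System.out.println(ts.getTime());  // 0
  }
}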

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
index af96988..45e7a31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
@@ -87,6 +87,7 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
 
     final int n = inputCol.isRepeating ? 1 : batch.size;
     int[] sel = batch.selected;
+    final boolean selectedInUse = !inputCol.isRepeating && batch.selectedInUse;
 
     if (batch.size == 0) {
 
@@ -99,7 +100,7 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
 
     if (inputCol.noNulls) {
       outV.noNulls = true;
-      if (batch.selectedInUse) {
+      if (selectedInUse) {
         for (int j = 0; j < n; j++) {
           int i = sel[j];
           try {
@@ -126,7 +127,7 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
       // Handle case with nulls. Don't do function if the value is null, to save time,
       // because calling the function can be expensive.
       outV.noNulls = false;
-      if (batch.selectedInUse) {
+      if (selectedInUse) {
         for (int j = 0; j < n; j++) {
           int i = sel[j];
           outV.isNull[i] = inputCol.isNull[i];

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
new file mode 100644
index 0000000..5fca678
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Abstract class to return various fields from a Timestamp.
+ */
+public abstract class VectorUDFTimestampFieldTimestamp extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  protected int colNum;
+  protected int outputColumn;
+  protected int field;
+  protected transient final Calendar calendar = Calendar.getInstance();
+
+  public VectorUDFTimestampFieldTimestamp(int field, int colNum, int outputColumn) {
+    this();
+    this.colNum = colNum;
+    this.outputColumn = outputColumn;
+    this.field = field;
+  }
+
+  public VectorUDFTimestampFieldTimestamp() {
+    super();
+  }
+
+  protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) {
+    calendar.setTime(timestampColVector.asScratchTimestamp(elementNum));
+    return calendar.get(field);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    Preconditions.checkState(inputTypes[0] == VectorExpression.Type.TIMESTAMP);
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn];
+    ColumnVector inputColVec = batch.cols[this.colNum];
+
+    /* the iteration logic below is identical across the field extractors */
+    final int n = inputColVec.isRepeating ? 1 : batch.size;
+    int[] sel = batch.selected;
+    final boolean selectedInUse = !inputColVec.isRepeating && batch.selectedInUse;
+
+    if(batch.size == 0) {
+      /* n != batch.size when isRepeating */
+      return;
+    }
+
+    /* true for all algebraic UDFs with no state */
+    outV.isRepeating = inputColVec.isRepeating;
+
+    TimestampColumnVector timestampColVector = (TimestampColumnVector) inputColVec;
+
+    if (inputColVec.noNulls) {
+      outV.noNulls = true;
+      if (selectedInUse) {
+        for(int j=0; j < n; j++) {
+          int i = sel[j];
+          outV.vector[i] = getTimestampField(timestampColVector, i);
+        }
+      } else {
+        for(int i = 0; i < n; i++) {
+          outV.vector[i] = getTimestampField(timestampColVector, i);
+        }
+      }
+    } else {
+      // Handle case with nulls. Don't do function if the value is null, to save time,
+      // because calling the function can be expensive.
+      outV.noNulls = false;
+      if (selectedInUse) {
+        for(int j=0; j < n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inputColVec.isNull[i];
+          if (!inputColVec.isNull[i]) {
+            outV.vector[i] = getTimestampField(timestampColVector, i);
+          }
+        }
+      } else {
+        for(int i = 0; i < n; i++) {
+          outV.isNull[i] = inputColVec.isNull[i];
+          if (!inputColVec.isNull[i]) {
+            outV.vector[i] = getTimestampField(timestampColVector, i);
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return this.outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public int getField() {
+    return field;
+  }
+
+  public void setField(int field) {
+    this.field = field;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.TIMESTAMP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
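
Two details of the class above are worth spelling out. First, selectedInUse is forced to false when the input repeats: only slot 0 of a repeating column is meaningful, so walking sel[] would touch undefined slots. Second, asScratchTimestamp() reuses one Timestamp per column vector rather than allocating per row. A hedged sketch of that scratch-object idea (a hypothetical mirror, not the actual TimestampColumnVector implementation):

import java.sql.Timestamp;

final class ScratchTimestampSketch {
  // Hypothetical mirror of per-row timestamp storage: epoch millis plus
  // nanos per row, and a single reused scratch Timestamp so the per-row
  // hot path allocates nothing.
  long[] time;
  int[] nanos;
  private final Timestamp scratch = new Timestamp(0);

  Timestamp asScratchTimestamp(int elementNum) {
    scratch.setTime(time[elementNum]);
    scratch.setNanos(nanos[elementNum]);
    return scratch; // valid only until the next call
  }
}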

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
new file mode 100644
index 0000000..3c693af
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Returns the Unix timestamp in seconds.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFUnixTimeStampDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  private DateWritable dateWritable;
+
+  @Override
+  protected long getDateField(long days) {
+    dateWritable.set((int) days);
+    return dateWritable.getTimeInSeconds();
+  }
+
+  public VectorUDFUnixTimeStampDate(int colNum, int outputColumn) {
+    /* not a real field */
+    super(-1, colNum, outputColumn);
+    dateWritable = new DateWritable();
+  }
+
+  public VectorUDFUnixTimeStampDate() {
+    super();
+  }
+
+}
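
For context: DateWritable.getTimeInSeconds() yields the Unix timestamp of midnight for the stored day. A rough standalone sketch assuming UTC (the real DateWritable also applies the local-zone offset):

final class UnixTimeFromDaysSketch {
  // Rough UTC equivalent of getDateField() above: one day is 86400 s.
  static long unixTimeFromDays(long days) {
    return days * 86400L;
  }

  public static void main(String[] args) {
    System.out.println(unixTimeFromDays(0)); // 0     -> 1970-01-01T00:00:00Z
    System.out.println(unixTimeFromDays(1)); // 86400 -> 1970-01-02T00:00:00Z
  }
}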

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampLong.java
deleted file mode 100644
index 6df68f0..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampLong.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-/**
- * Return Unix Timestamp.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFUnixTimeStampLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  @Override
-  protected long getTimestampField(long time) {
-    long ms = (time / (1000*1000*1000)) * 1000;
-    long remainder = time % (1000*1000*1000);
-    /* negative timestamps need to be adjusted */
-    if(remainder < 0) {
-      ms -= 1000;
-    }
-    return ms / 1000;
-  }
-
-  @Override
-  protected long getDateField(long days) {
-    long ms = DateWritable.daysToMillis((int) days);
-    return ms / 1000;
-  }
-
-  public VectorUDFUnixTimeStampLong(int colNum, int outputColumn) {
-    /* not a real field */
-    super(-1, colNum, outputColumn);
-  }
-
-  public VectorUDFUnixTimeStampLong() {
-    super();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java
new file mode 100644
index 0000000..2bd7756
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampTimestamp.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+
+/**
+ * Returns the Unix timestamp in seconds.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFUnixTimeStampTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  protected long getTimestampField(TimestampColumnVector timestampColVector, int elementNum) {
+    return timestampColVector.asScratchTimestamp(elementNum).getTime() / 1000;
+  }
+
+  public VectorUDFUnixTimeStampTimestamp(int colNum, int outputColumn) {
+    /* not a real field */
+    super(-1, colNum, outputColumn);
+  }
+
+  public VectorUDFUnixTimeStampTimestamp() {
+    super();
+  }
+
+}
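
One behavioral note: the deleted VectorUDFUnixTimeStampLong below adjusted negative remainders downward, while getTime() / 1000 here truncates toward zero, so a pre-epoch timestamp with a sub-second part can come out one second higher. A standalone check of the difference (Math.floorDiv requires Java 8+; not part of the patch):

final class FloorDivSketch {
  public static void main(String[] args) {
    long millis = -500; // 1969-12-31T23:59:59.500Z
    System.out.println(millis / 1000);               // 0  (truncation)
    System.out.println(Math.floorDiv(millis, 1000)); // -1 (floor; matches the
                                                     // old negative-remainder
                                                     // adjustment)
  }
}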

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearDate.java
new file mode 100644
index 0000000..8e8f125
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearDate.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get week of year.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFWeekOfYearDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFWeekOfYearDate(int colNum, int outputColumn) {
+    super(Calendar.WEEK_OF_YEAR, colNum, outputColumn);
+    initCalendar();
+  }
+
+  public VectorUDFWeekOfYearDate() {
+    super();
+    initCalendar();
+  }
+
+  private void initCalendar() {
+    /* code copied over from UDFWeekOfYear implementation */
+    calendar.setFirstDayOfWeek(Calendar.MONDAY);
+    calendar.setMinimalDaysInFirstWeek(4);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearLong.java
deleted file mode 100644
index 1ebadda..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearLong.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Calendar;
-
-/**
- * Expression to get week of year.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFWeekOfYearLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-
-  public VectorUDFWeekOfYearLong(int colNum, int outputColumn) {
-    super(Calendar.WEEK_OF_YEAR, colNum, outputColumn);
-    initCalendar();
-  }
-
-  public VectorUDFWeekOfYearLong() {
-    super();
-    initCalendar();
-  }
-
-  private void initCalendar() {
-    /* code copied over from UDFWeekOfYear implementation */
-    calendar.setFirstDayOfWeek(Calendar.MONDAY);
-    calendar.setMinimalDaysInFirstWeek(4);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearTimestamp.java
new file mode 100644
index 0000000..4b9c26b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFWeekOfYearTimestamp.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get week of year.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFWeekOfYearTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFWeekOfYearTimestamp(int colNum, int outputColumn) {
+    super(Calendar.WEEK_OF_YEAR, colNum, outputColumn);
+    initCalendar();
+  }
+
+  public VectorUDFWeekOfYearTimestamp() {
+    super();
+    initCalendar();
+  }
+
+  private void initCalendar() {
+    /* code copied over from UDFWeekOfYear implementation */
+    calendar.setFirstDayOfWeek(Calendar.MONDAY);
+    calendar.setMinimalDaysInFirstWeek(4);
+  }
+}
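
For context: the two Calendar settings in initCalendar() encode the ISO-8601 week rule: weeks start on Monday, and week 1 is the first week containing at least four days of the new year. A standalone check (not part of the patch):

import java.util.Calendar;

final class IsoWeekSketch {
  public static void main(String[] args) {
    Calendar c = Calendar.getInstance();
    c.setFirstDayOfWeek(Calendar.MONDAY);
    c.setMinimalDaysInFirstWeek(4);   // ISO-8601: week 1 contains Jan 4th
    c.set(2016, Calendar.JANUARY, 1); // 2016-01-01 was a Friday
    // Under ISO rules this date still belongs to week 53 of 2015.
    System.out.println(c.get(Calendar.WEEK_OF_YEAR)); // 53
  }
}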

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearDate.java
new file mode 100644
index 0000000..a2d098d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearDate.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get year as a long.
+ * Extends {@link VectorUDFTimestampFieldDate}
+ */
+public final class VectorUDFYearDate extends VectorUDFTimestampFieldDate {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFYearDate(int colNum, int outputColumn) {
+    super(Calendar.YEAR, colNum, outputColumn);
+  }
+
+  public VectorUDFYearDate() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearLong.java
deleted file mode 100644
index 41c9d5b..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearLong.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.Arrays;
-import java.util.Calendar;
-
-/**
- * Expression to get year as a long.
- * Extends {@link VectorUDFTimestampFieldLong}
- */
-public final class VectorUDFYearLong extends VectorUDFTimestampFieldLong {
-
-  private static final long serialVersionUID = 1L;
-  /* year boundaries in nanoseconds */
-  private static transient final long[] YEAR_BOUNDARIES;
-  private static transient final int MIN_YEAR = 1678;
-  private static transient final int MAX_YEAR = 2300;
-
-  static {
-    YEAR_BOUNDARIES = new long[MAX_YEAR-MIN_YEAR];
-    Calendar c = Calendar.getInstance();
-    c.setTimeInMillis(0); // c.set doesn't reset millis
-    /* 1901 Jan is not with in range */
-    for(int year=MIN_YEAR+1; year <= MAX_YEAR; year++) {
-      c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
-      YEAR_BOUNDARIES[year-MIN_YEAR-1] = c.getTimeInMillis()*1000*1000;
-    }
-  }
-
-  @Override
-  protected long getTimestampField(long time) {
-    /* binarySearch is faster than a loop doing a[i] (no array out of bounds checks) */
-    int year = Arrays.binarySearch(YEAR_BOUNDARIES, time);
-    if(year >= 0) {
-      /* 0 == 1902 etc */
-      return MIN_YEAR + 1 + year;
-    } else {
-      /* -1 == 1901, -2 == 1902 */
-      return MIN_YEAR - 1 - year;
-    }
-  }
-
-  public VectorUDFYearLong(int colNum, int outputColumn) {
-    super(Calendar.YEAR, colNum, outputColumn);
-  }
-
-  public VectorUDFYearLong() {
-    super();
-  }
-}
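
The deleted year lookup above leans on Arrays.binarySearch returning -(insertionPoint) - 1 on a miss; decoding that convention is the whole trick behind its MIN_YEAR arithmetic. A standalone illustration (not Hive code):

import java.util.Arrays;

final class BinarySearchDecodeSketch {
  public static void main(String[] args) {
    long[] boundaries = {100L, 200L, 300L}; // sorted lower bounds
    // Exact hit: non-negative index.
    System.out.println(Arrays.binarySearch(boundaries, 200L)); // 1
    // Miss: -(insertionPoint) - 1. 250 would be inserted at index 2, so
    // binarySearch returns -3, and (-result - 2) == 1 recovers the index
    // of the last boundary <= 250.
    int r = Arrays.binarySearch(boundaries, 250L);
    System.out.println(r);      // -3
    System.out.println(-r - 2); // 1
  }
}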

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearTimestamp.java
new file mode 100644
index 0000000..f418bb3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFYearTimestamp.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Calendar;
+
+/**
+ * Expression to get year as a long.
+ * Extends {@link VectorUDFTimestampFieldTimestamp}
+ */
+public final class VectorUDFYearTimestamp extends VectorUDFTimestampFieldTimestamp {
+
+  private static final long serialVersionUID = 1L;
+
+  public VectorUDFYearTimestamp(int colNum, int outputColumn) {
+    super(Calendar.YEAR, colNum, outputColumn);
+  }
+
+  public VectorUDFYearTimestamp() {
+    super();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgTimestamp.java
new file mode 100644
index 0000000..d0a1d0d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgTimestamp.java
@@ -0,0 +1,482 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * Generated from template VectorUDAFAvg.txt.
+ */
+@Description(name = "avg",
+    value = "_FUNC_(expr) - Returns the average value of expr (vectorized, type: timestamp)")
+public class VectorUDAFAvgTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /** class for storing the current aggregate value. */
+    static class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+
+      /**
+      * Value is explicitly (re)initialized in reset()
+      */
+      transient private boolean isNull = true;
+
+      public void sumValue(double value) {
+        if (isNull) {
+          sum = value;
+          count = 1;
+          isNull = false;
+        } else {
+          sum += value;
+          count++;
+        }
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0L;
+      }
+    }
+    
+    private VectorExpression inputExpression;
+    transient private Object[] partialResult;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private StructObjectInspector soi;
+
+    public VectorUDAFAvgTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFAvgTimestamp() {
+      super();
+      partialResult = new Object[2];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      initPartialResultInspector();
+    }
+
+    private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int bufferIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      if (inputColVector.noNulls) {
+        if (inputColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, bufferIndex,
+            inputColVector.getDouble(0),
+            batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize);
+          }
+        }
+      } else {
+        if (inputColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector.getDouble(0), batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector.getDouble(0), batchSize, inputColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(value);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(
+            inputColVector.getDouble(selection[i]));
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(inputColVector.getDouble(i));
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[selection[i]]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.sumValue(value);
+        }
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets, 
+            bufferIndex,
+            i);
+          myagg.sumValue(value);
+        }
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selection[j];
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            j);
+          myagg.sumValue(inputColVector.getDouble(i));
+        }
+      }
+    }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.sumValue(inputColVector.getDouble(i));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+        throws HiveException {
+
+        inputExpression.evaluate(batch);
+
+        TimestampColumnVector inputColVector = 
+            (TimestampColumnVector)batch.cols[this.inputExpression.getOutputColumn()];
+
+        int batchSize = batch.size;
+
+        if (batchSize == 0) {
+          return;
+        }
+
+        Aggregation myagg = (Aggregation)agg;
+        
+        if (inputColVector.isRepeating) {
+          if (inputColVector.noNulls) {
+            if (myagg.isNull) {
+              myagg.isNull = false;
+              myagg.sum = 0;
+              myagg.count = 0;
+            }
+            myagg.sum += inputColVector.getDouble(0)*batchSize;
+            myagg.count += batchSize;
+          }
+          return;
+        }
+
+        if (!batch.selectedInUse && inputColVector.noNulls) {
+          iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+        }
+        else if (!batch.selectedInUse) {
+          iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+        }
+        else if (inputColVector.noNulls){
+          iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+        }
+        else {
+          iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+        }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg, 
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum = 0;
+        myagg.count = 0;
+      }
+
+      for (int i=0; i< batchSize; ++i) {
+        double value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector, 
+        int batchSize) {
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum = 0;
+        myagg.count = 0;
+      }
+
+      for (int i=0;i<batchSize;++i) {
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        return partialResult;
+      }
+    }
+
+  @Override
+  public ObjectInspector getOutputObjectInspector() {
+    return soi;
+  }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 2,
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+
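
The aggregator above fans out into specialized loops keyed on three batch properties: isRepeating (one physical value stands for the whole batch), selectedInUse (rows are addressed indirectly through batch.selected), and noNulls (the isNull mask can be skipped). A compact sketch of the same dispatch over a bare double[] is below; it is illustrative only, and where it reads vals[i] the real code reads each row via inputColVector.getDouble(i).

    public class BatchDispatchSketch {
      static double avg(double[] vals, boolean[] isNull, boolean noNulls,
                        boolean isRepeating, int[] selected, boolean selectedInUse, int n) {
        double sum = 0;
        long count = 0;
        if (isRepeating) {
          if (noNulls || !isNull[0]) {
            sum = vals[0] * n;  // fold the whole batch in one step
            count = n;
          }
        } else {
          for (int j = 0; j < n; j++) {
            int i = selectedInUse ? selected[j] : j;  // indirect vs. direct row index
            if (noNulls || !isNull[i]) {
              sum += vals[i];
              count++;
            }
          }
        }
        return count == 0 ? Double.NaN : sum / count;
      }

      public static void main(String[] args) {
        double[] vals = {1.0, 2.0, 3.0, 4.0};
        int[] selected = {0, 2};
        System.out.println(avg(vals, new boolean[4], true, false, selected, true, 2));  // 2.0
      }
    }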

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdPopTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdPopTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdPopTimestamp.java
new file mode 100644
index 0000000..fa25e6a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdPopTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFStdPopTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "std,stddev,stddev_pop",
+    value = "_FUNC_(x) - Returns the standard deviation of a set of numbers (vectorized, timestamp)")
+public class VectorUDAFStdPopTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+    * class for storing the current aggregate value.
+    */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+      * Value is explicitly (re)initialized in reset() (despite the init() below).
+      */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFStdPopTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFStdPopTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+  private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        fname.add("variance");
+
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void  iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if (myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void  iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if (myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if (myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+  @Override
+  public ObjectInspector getOutputObjectInspector() {
+    return soi;
+  }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 3 +
+      model.primitive1(),
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+
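
The variance update used throughout this file, t = count*value - sum followed by variance += t*t / (count*(count-1)), accumulates the sum of squared deviations incrementally (a Youngs and Cramer style update). A small self-check against the two-pass formula, assuming ordinary double inputs:

    public class IncrementalVarianceSketch {
      public static void main(String[] args) {
        double[] xs = {1.5, 2.0, 4.5, 8.0};
        double sum = 0, variance = 0;
        long count = 0;
        for (double value : xs) {
          sum += value;
          count++;
          if (count > 1) {
            double t = count * value - sum;
            variance += (t * t) / ((double) count * (count - 1));
          }
        }
        // Two-pass sum of squared deviations for comparison.
        double mean = sum / count, twoPass = 0;
        for (double x : xs) {
          twoPass += (x - mean) * (x - mean);
        }
        System.out.println(variance + " == " + twoPass);                    // both 26.5
        System.out.println("stddev_pop = " + Math.sqrt(variance / count));  // ~2.574
      }
    }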


http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
new file mode 100644
index 0000000..a9ca93c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+// A type date (LongColumnVector storing epoch days) minus a type date produces a
+// type interval_day_time (stored in an IntervalDayTimeColumnVector, as below).
+public class DateColSubtractDateScalar extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public DateColSubtractDateScalar(int colNum, long value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
+  }
+
+  public DateColSubtractDateScalar() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date (epochDays).
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type HiveIntervalDayTime.
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
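
The expression widens each epoch-day operand to a java.sql.Timestamp via DateWritable.daysToMillis before handing the pair to DateTimeMath.subtract. A sketch of just that widening step; diffMillis is an illustrative helper, and the real expression instead writes an interval_day_time through the output vector's scratch object.

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateWideningSketch {
      static long diffMillis(long leftEpochDays, long rightEpochDays) {
        Timestamp left = new Timestamp(0);
        left.setTime(DateWritable.daysToMillis((int) leftEpochDays));
        Timestamp right = new Timestamp(0);
        right.setTime(DateWritable.daysToMillis((int) rightEpochDays));
        return left.getTime() - right.getTime();
      }

      public static void main(String[] args) {
        // One day apart; daysToMillis is local-time based, so DST zones can differ.
        System.out.println(diffMillis(1, 0));  // 86400000 in zones without a shift
      }
    }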

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
new file mode 100644
index 0000000..59cf9da
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+// A type date (LongColumnVector storing epoch days) minus a type date produces a
+// type interval_day_time (stored in an IntervalDayTimeColumnVector, as below).
+public class DateScalarSubtractDateColumn extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private Timestamp scratchTimestamp2;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public DateScalarSubtractDateColumn(long value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+    scratchTimestamp2 = new Timestamp(0);
+  }
+
+  public DateScalarSubtractDateColumn() {
+  }
+
+  /**
+   * Method to evaluate the scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type date (epochDays).
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type HiveIntervalDayTime.
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    long[] vector2 = inputColVector2.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
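
Both date-subtraction expressions share the same null-propagation contract in their has-nulls paths: mirror the input null mask per selected row, or bulk-copy it in the dense case, then let NullUtil.setNullOutputEntriesColScalar scrub the entries marked null. A sketch of that pattern over bare arrays; propagateNulls is an illustrative name.

    public class NullPropagationSketch {
      static void propagateNulls(boolean[] inputIsNull, boolean[] outputIsNull,
                                 int[] sel, boolean selectedInUse, int n) {
        if (selectedInUse) {
          for (int j = 0; j < n; j++) {
            int i = sel[j];
            outputIsNull[i] = inputIsNull[i];  // sparse path: touch only selected rows
          }
        } else {
          System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);  // dense path: bulk copy
        }
      }

      public static void main(String[] args) {
        boolean[] in = {false, true, false};
        boolean[] out = new boolean[3];
        propagateNulls(in, out, new int[] {1}, true, 1);
        System.out.println(out[1]);  // true
      }
    }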

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
new file mode 100644
index 0000000..25a276a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+import java.util.HashSet;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Evaluate IN filter on a batch for a vector of timestamps.
+ */
+public class FilterTimestampColumnInList extends VectorExpression implements ITimestampInExpr {
+  private static final long serialVersionUID = 1L;
+  private int inputCol;
+  private Timestamp[] inListValues;
+
+  // The set object containing the IN list.
+  private transient HashSet<Timestamp> inSet;
+
+  public FilterTimestampColumnInList() {
+    super();
+    inSet = null;
+  }
+
+  /**
+   * After construction you must call setInListValues() to add the values to the IN set.
+   */
+  public FilterTimestampColumnInList(int colNum) {
+    this.inputCol = colNum;
+    inSet = null;
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    if (inSet == null) {
+      inSet = new HashSet<Timestamp>(inListValues.length);
+      for (Timestamp val : inListValues) {
+        inSet.add(val);
+      }
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[inputCol];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+
+        // All must be selected otherwise size would be zero
+        // Repeating property will not change.
+
+        if (!(inSet.contains(inputColVector.asScratchTimestamp(0)))) {
+          //Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!inSet.contains(inputColVector.asScratchTimestamp(0))) {
+
+            //Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+           if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
+             sel[newSize++] = i;
+           }
+          }
+        }
+
+        // Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public Descriptor getDescriptor() {
+
+    // This VectorExpression (IN) is a special case, so don't return a descriptor.
+    return null;
+  }
+
+  public void setInListValues(Timestamp[] a) {
+    this.inListValues = a;
+  }
+}
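
The filter compacts the selection in place: surviving row indices slide left within sel[] and batch.size shrinks to the survivor count; in the dense path, selectedInUse flips to true only when rows were actually dropped, so a fully-passing batch keeps its cheaper direct addressing. A sketch of the idiom over plain longs; compact is an illustrative helper, and the real code tests inputColVector.asScratchTimestamp(i) against the HashSet.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class SelectionCompactionSketch {
      static int compact(int[] sel, boolean selectedInUse, int n,
                         long[] values, Set<Long> inSet) {
        int newSize = 0;
        for (int j = 0; j < n; j++) {
          int i = selectedInUse ? sel[j] : j;
          if (inSet.contains(values[i])) {
            sel[newSize++] = i;  // survivors slide left; relative order is kept
          }
        }
        return newSize;  // caller sets batch.size (and selectedInUse if rows dropped)
      }

      public static void main(String[] args) {
        long[] values = {10, 20, 30, 40};
        int[] sel = new int[4];
        Set<Long> inSet = new HashSet<>(Arrays.asList(10L, 30L));
        int newSize = compact(sel, false, 4, values, inSet);
        System.out.println(newSize + " " + sel[0] + " " + sel[1]);  // 2 0 2
      }
    }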

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToTimestamp.java
new file mode 100644
index 0000000..561c152
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToTimestamp.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * This is a superclass for unary decimal functions and expressions returning timestamps that
+ * operate directly on the input and set the output.
+ */
+public abstract class FuncDecimalToTimestamp extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  int inputColumn;
+  int outputColumn;
+
+  public FuncDecimalToTimestamp(int inputColumn, int outputColumn) {
+    this.inputColumn = inputColumn;
+    this.outputColumn = outputColumn;
+  }
+
+  public FuncDecimalToTimestamp() {
+    super();
+  }
+
+  abstract protected void func(TimestampColumnVector outV, DecimalColumnVector inV, int i);
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    DecimalColumnVector inV = (DecimalColumnVector) batch.cols[inputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    TimestampColumnVector outV = (TimestampColumnVector) batch.cols[outputColumn];
+
+    if (n == 0) {
+
+      // Nothing to do
+      return;
+    }
+
+    if (inV.noNulls) {
+      outV.noNulls = true;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        func(outV, inV, 0);
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      } else {
+        for(int i = 0; i != n; i++) {
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      }
+    } else {
+
+      // Handle case with nulls. Don't do function if the value is null,
+      // because the data may be undefined for a null value.
+      outV.noNulls = false;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        outV.isNull[0] = inV.isNull[0];
+        if (!inV.isNull[0]) {
+          func(outV, inV, 0);
+        }
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inV.isNull[i];
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      } else {
+        System.arraycopy(inV.isNull, 0, outV.isNull, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      }
+    }
+  }
+
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.DECIMAL)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
\ No newline at end of file
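
A concrete conversion only has to supply func(); the null propagation, selection-vector and repeating-value handling above are inherited. A minimal sketch of a subclass, assuming it lives in the same package and that the decimal encodes seconds since the epoch (the class name and that interpretation are illustrative, not part of this patch):

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    // Hypothetical subclass for illustration: decimal seconds -> timestamp.
    public class ExampleDecimalSecondsToTimestamp extends FuncDecimalToTimestamp {
      private static final long serialVersionUID = 1L;

      @Override
      protected void func(TimestampColumnVector outV, DecimalColumnVector inV, int i) {
        // DecimalColumnVector stores HiveDecimalWritable values in inV.vector.
        // Going through double loses sub-millisecond precision; fine for a sketch.
        double seconds = inV.vector[i].getHiveDecimal().doubleValue();
        outV.set(i, new Timestamp((long) (seconds * 1000d)));
      }
    }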

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToDecimal.java
new file mode 100644
index 0000000..774551c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToDecimal.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * This is a superclass for unary timestamp functions and expressions returning decimals that
+ * operate directly on the input and set the output.
+ */
+public abstract class FuncTimestampToDecimal extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  int inputColumn;
+  int outputColumn;
+
+  public FuncTimestampToDecimal(int inputColumn, int outputColumn) {
+    this.inputColumn = inputColumn;
+    this.outputColumn = outputColumn;
+    this.outputType = "decimal";
+  }
+
+  public FuncTimestampToDecimal() {
+    super();
+    this.outputType = "decimal";
+  }
+
+  protected abstract void func(DecimalColumnVector outV, TimestampColumnVector inV, int i);
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inV = (TimestampColumnVector) batch.cols[inputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    DecimalColumnVector outV = (DecimalColumnVector) batch.cols[outputColumn];
+
+    if (n == 0) {
+
+      // Nothing to do
+      return;
+    }
+
+    if (inV.noNulls) {
+      outV.noNulls = true;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        func(outV, inV, 0);
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      } else {
+        for(int i = 0; i != n; i++) {
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      }
+    } else {
+
+      // Handle case with nulls. Don't do function if the value is null,
+      // because the data may be undefined for a null value.
+      outV.noNulls = false;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        outV.isNull[0] = inV.isNull[0];
+        if (!inV.isNull[0]) {
+          func(outV, inV, 0);
+        }
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inV.isNull[i];
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      } else {
+        System.arraycopy(inV.isNull, 0, outV.isNull, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      }
+    }
+  }
+
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  public int getInputColumn() {
+    return inputColumn;
+  }
+
+  public void setInputColumn(int inputColumn) {
+    this.inputColumn = inputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.TIMESTAMP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
\ No newline at end of file
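
The reverse direction follows the same template; a sketch that renders a timestamp as decimal seconds, reusing the asScratchTimestamp accessor seen elsewhere in this patch (class name hypothetical, same package assumed):

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    // Hypothetical subclass for illustration: timestamp -> decimal seconds.
    public class ExampleTimestampToDecimalSeconds extends FuncTimestampToDecimal {
      private static final long serialVersionUID = 1L;

      @Override
      protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
        Timestamp ts = inV.asScratchTimestamp(i);
        // getTime() is milliseconds since the epoch; move the point to get seconds.
        outV.set(i, HiveDecimal.create(BigDecimal.valueOf(ts.getTime()).movePointLeft(3)));
      }
    }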

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToLong.java
new file mode 100644
index 0000000..b84d9be
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncTimestampToLong.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * This is a superclass for unary timestamp functions and expressions returning longs that
+ * operate directly on the input and set the output.
+ */
+public abstract class FuncTimestampToLong extends VectorExpression {
+  private static final long serialVersionUID = 1L;
+  int inputColumn;
+  int outputColumn;
+
+  public FuncTimestampToLong(int inputColumn, int outputColumn) {
+    this.inputColumn = inputColumn;
+    this.outputColumn = outputColumn;
+    this.outputType = "long";
+  }
+
+  public FuncTimestampToLong() {
+    super();
+    this.outputType = "long";
+  }
+
+  protected abstract void func(LongColumnVector outV, TimestampColumnVector inV, int i);
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inV = (TimestampColumnVector) batch.cols[inputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn];
+
+    if (n == 0) {
+
+      // Nothing to do
+      return;
+    }
+
+    if (inV.noNulls) {
+      outV.noNulls = true;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        func(outV, inV, 0);
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      } else {
+        for(int i = 0; i != n; i++) {
+          func(outV, inV, i);
+        }
+        outV.isRepeating = false;
+      }
+    } else {
+
+      // Handle case with nulls. Don't do function if the value is null,
+      // because the data may be undefined for a null value.
+      outV.noNulls = false;
+      if (inV.isRepeating) {
+        outV.isRepeating = true;
+        outV.isNull[0] = inV.isNull[0];
+        if (!inV.isNull[0]) {
+          func(outV, inV, 0);
+        }
+      } else if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outV.isNull[i] = inV.isNull[i];
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      } else {
+        System.arraycopy(inV.isNull, 0, outV.isNull, 0, n);
+        for(int i = 0; i != n; i++) {
+          if (!inV.isNull[i]) {
+            func(outV, inV, i);
+          }
+        }
+        outV.isRepeating = false;
+      }
+    }
+  }
+
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  public int getInputColumn() {
+    return inputColumn;
+  }
+
+  public void setInputColumn(int inputColumn) {
+    this.inputColumn = inputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
+    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.TIMESTAMP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN);
+    return b.build();
+  }
+}
\ No newline at end of file
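
A sketch of the long-valued variant, extracting whole epoch seconds; the division truncates toward zero for pre-epoch values, which is acceptable for an illustration (class name hypothetical, same package assumed):

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    // Hypothetical subclass for illustration: timestamp -> epoch seconds.
    public class ExampleTimestampToEpochSeconds extends FuncTimestampToLong {
      private static final long serialVersionUID = 1L;

      @Override
      protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
        // getTime() returns milliseconds since the epoch.
        outV.vector[i] = inV.asScratchTimestamp(i).getTime() / 1000L;
      }
    }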

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java
new file mode 100644
index 0000000..f6cc971
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ITimestampInExpr.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+public interface ITimestampInExpr {
+  void setInListValues(Timestamp[] inVals);
+}
\ No newline at end of file
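
The interface lets the planner hand the constant IN-list to a compiled filter once, ahead of per-batch evaluation. A sketch of that handoff (the wrapper class and the literal values are hypothetical):

    import java.sql.Timestamp;

    public class ExampleTimestampInSetup {
      static void configure(ITimestampInExpr expr) {
        // Resolved once at plan time; evaluate() then tests each row against these.
        expr.setInListValues(new Timestamp[] {
            Timestamp.valueOf("2000-01-01 00:00:00"),
            Timestamp.valueOf("2010-01-01 00:00:00")
        });
      }
    }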

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
new file mode 100644
index 0000000..804923e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second and third are interval_day_time columns or interval_day_time expression results.
+ */
+public class IfExprIntervalDayTimeColumnColumn extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg2Column, arg3Column;
+  private int outputColumn;
+
+  public IfExprIntervalDayTimeColumnColumn(int arg1Column, int arg2Column, int arg3Column, int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprIntervalDayTimeColumnColumn() {
+    super();
+  }
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg2ColVector = (IntervalDayTimeColumnVector) batch.cols[arg2Column];
+    IntervalDayTimeColumnVector arg3ColVector = (IntervalDayTimeColumnVector) batch.cols[arg3Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls && arg3ColVector.noNulls;
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    /* All the code paths below propagate nulls even if neither arg2 nor arg3
+     * have nulls. This is to reduce the number of code paths and shorten the
+     * code, at the expense of maybe doing unnecessary work if neither input
+     * has nulls. This could be improved in the future by expanding the number
+     * of code paths.
+     */
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // extend any repeating values and noNulls indicator in the inputs
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
\ No newline at end of file
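
End to end, the expression reads three input columns and writes a fourth. A sketch wiring up and evaluating one batch, assuming the no-argument column-vector constructors allocate VectorizedRowBatch.DEFAULT_SIZE entries like the existing vectors, and that the sketch sits in the same package (column layout hypothetical):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

    public class ExampleIfIntervalDayTime {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(4);
        batch.cols[0] = new LongColumnVector();            // boolean predicate
        batch.cols[1] = new IntervalDayTimeColumnVector(); // THEN values
        batch.cols[2] = new IntervalDayTimeColumnVector(); // ELSE values
        batch.cols[3] = new IntervalDayTimeColumnVector(); // result
        batch.size = 2;

        LongColumnVector pred = (LongColumnVector) batch.cols[0];
        IntervalDayTimeColumnVector thenCol = (IntervalDayTimeColumnVector) batch.cols[1];
        IntervalDayTimeColumnVector elseCol = (IntervalDayTimeColumnVector) batch.cols[2];
        pred.vector[0] = 1;  // true
        pred.vector[1] = 0;  // false
        for (int i = 0; i < 2; i++) {
          thenCol.set(i, new HiveIntervalDayTime(1, 0, 0, 0, 0)); // 1 day
          elseCol.set(i, new HiveIntervalDayTime(2, 0, 0, 0, 0)); // 2 days
        }

        new IfExprIntervalDayTimeColumnColumn(0, 1, 2, 3).evaluate(batch);
        // Row 0 now holds 1 day, row 1 holds 2 days.
      }
    }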

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
new file mode 100644
index 0000000..8face7d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a column or non-constant expression result.
+ * The third is a constant value.
+ */
+public class IfExprIntervalDayTimeColumnScalar extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg2Column;
+  private HiveIntervalDayTime arg3Scalar;
+  private int outputColumn;
+
+  public IfExprIntervalDayTimeColumnScalar(int arg1Column, int arg2Column, HiveIntervalDayTime arg3Scalar,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprIntervalDayTimeColumnScalar() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg2ColVector = (IntervalDayTimeColumnVector) batch.cols[arg2Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls; // nulls can only come from arg2
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
\ No newline at end of file
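
The column/scalar variant captures its constant at construction time, so each row only consults the predicate and the THEN column. A construction sketch (column numbers and the one-day constant hypothetical, same package assumed):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;

    public class ExampleIfColumnScalar {
      static VectorExpression build() {
        // Predicate in column 0, THEN values in column 1, result in column 2;
        // the ELSE branch is the constant interval of one day.
        return new IfExprIntervalDayTimeColumnScalar(
            0, 1, new HiveIntervalDayTime(1, 0, 0, 0, 0), 2);
      }
    }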

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
new file mode 100644
index 0000000..40f2e08
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a constant value.
+ * The third is a column or non-constant expression result.
+ */
+public class IfExprIntervalDayTimeScalarColumn extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg3Column;
+  private HiveIntervalDayTime arg2Scalar;
+  private int outputColumn;
+
+  public IfExprIntervalDayTimeScalarColumn(int arg1Column, HiveIntervalDayTime arg2Scalar, int arg3Column,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprIntervalDayTimeScalarColumn() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg3ColVector = (IntervalDayTimeColumnVector) batch.cols[arg3Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg3ColVector.noNulls; // nulls can only come from arg3 column vector
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    // This could be optimized in the future by having separate paths
+    // for when arg3ColVector is repeating or has no nulls.
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
new file mode 100644
index 0000000..43676dd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a constant value.
+ * The third is a constant value.
+ */
+public class IfExprIntervalDayTimeScalarScalar extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column;
+  private HiveIntervalDayTime arg2Scalar;
+  private HiveIntervalDayTime arg3Scalar;
+  private int outputColumn;
+
+  public IfExprIntervalDayTimeScalarScalar(int arg1Column, HiveIntervalDayTime arg2Scalar, HiveIntervalDayTime arg3Scalar,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprIntervalDayTimeScalarScalar() {
+    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = false; // both branch values are non-null constants; isNull is cleared per row below
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+    } else if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+          outputIsNull[i] = false;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+        }
+        Arrays.fill(outputIsNull, 0, n, false);
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"),
+            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
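
Because both value operands are constants, a null predicate merely steers the choice to the ELSE constant and never yields a null output, which is why the loops above clear isNull unconditionally. A compact check (column numbers hypothetical, same package assumed):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

    public class ExampleIfScalarScalar {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(2);
        LongColumnVector pred = new LongColumnVector();
        batch.cols[0] = pred;
        batch.cols[1] = new IntervalDayTimeColumnVector();
        batch.size = 1;
        pred.noNulls = false;
        pred.isNull[0] = true;  // SQL NULL predicate

        new IfExprIntervalDayTimeScalarScalar(0,
            new HiveIntervalDayTime(1, 0, 0, 0, 0),   // THEN: 1 day
            new HiveIntervalDayTime(2, 0, 0, 0, 0),   // ELSE: 2 days
            1).evaluate(batch);
        // Row 0 receives the ELSE constant (2 days) and isNull[0] stays false.
      }
    }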

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumn.java
new file mode 100644
index 0000000..a1e489b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumn.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second and third are timestamp columns or timestamp expression results.
+ */
+public class IfExprTimestampColumnColumn extends IfExprTimestampColumnColumnBase {
+
+  private static final long serialVersionUID = 1L;
+
+  public IfExprTimestampColumnColumn(int arg1Column, int arg2Column, int arg3Column, int outputColumn) {
+    super(arg1Column, arg2Column, arg3Column, outputColumn);
+  }
+
+  public IfExprTimestampColumnColumn() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
new file mode 100644
index 0000000..8441863
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second and third are timestamp columns or timestamp expression results.
+ */
+public abstract class IfExprTimestampColumnColumnBase extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg2Column, arg3Column;
+  private int outputColumn;
+
+  public IfExprTimestampColumnColumnBase(int arg1Column, int arg2Column, int arg3Column, int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprTimestampColumnColumnBase() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    TimestampColumnVector arg2ColVector = (TimestampColumnVector) batch.cols[arg2Column];
+    TimestampColumnVector arg3ColVector = (TimestampColumnVector) batch.cols[arg3Column];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls && arg3ColVector.noNulls;
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    /* All the code paths below propagate nulls even if neither arg2 nor arg3
+     * have nulls. This is to reduce the number of code paths and shorten the
+     * code, at the expense of maybe doing unnecessary work if neither input
+     * has nulls. This could be improved in the future by expanding the number
+     * of code paths.
+     */
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // extend any repeating values and noNulls indicator in the inputs
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
new file mode 100644
index 0000000..ae997e0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a column or non-constant expression result.
+ * The third is a constant value.
+ */
+public class IfExprTimestampColumnScalar extends IfExprTimestampColumnScalarBase {
+
+  private static final long serialVersionUID = 1L;
+
+  public IfExprTimestampColumnScalar(int arg1Column, int arg2Column, Timestamp arg3Scalar,
+      int outputColumn) {
+    super(arg1Column, arg2Column, arg3Scalar, outputColumn);
+  }
+
+  public IfExprTimestampColumnScalar() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
new file mode 100644
index 0000000..6b87ff2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a column or non-constant expression result.
+ * The third is a constant value.
+ */
+public abstract class IfExprTimestampColumnScalarBase extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg2Column;
+  private Timestamp arg3Scalar;
+  private int outputColumn;
+
+  public IfExprTimestampColumnScalarBase(int arg1Column, int arg2Column, Timestamp arg3Scalar,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprTimestampColumnScalarBase() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    TimestampColumnVector arg2ColVector = (TimestampColumnVector) batch.cols[arg2Column];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls; // nulls can only come from arg2
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
new file mode 100644
index 0000000..3d53df1
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a constant value.
+ * The third is a column or non-constant expression result.
+ */
+public class IfExprTimestampScalarColumn extends IfExprTimestampScalarColumnBase {
+
+  private static final long serialVersionUID = 1L;
+
+  public IfExprTimestampScalarColumn(int arg1Column, Timestamp arg2Scalar, int arg3Column,
+      int outputColumn) {
+    super(arg1Column, arg2Scalar, arg3Column, outputColumn);
+  }
+
+  public IfExprTimestampScalarColumn() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
new file mode 100644
index 0000000..2162f17
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a constant value.
+ * The third is a column or non-constant expression result.
+ */
+public abstract class IfExprTimestampScalarColumnBase extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int arg1Column, arg3Column;
+  private Timestamp arg2Scalar;
+  private int outputColumn;
+
+  public IfExprTimestampScalarColumnBase(int arg1Column, Timestamp arg2Scalar, int arg3Column,
+      int outputColumn) {
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
+  }
+
+  public IfExprTimestampScalarColumnBase() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    TimestampColumnVector arg3ColVector = (TimestampColumnVector) batch.cols[arg3Column];
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg3ColVector.noNulls; // nulls can only come from arg3 column vector
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    // This could be optimized in the future by having separate paths
+    // for when arg3ColVector is repeating or has no nulls.
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchTimestamp(i));
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchTimestamp(i));
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchTimestamp(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+}
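
The flatten()/unFlatten() pairing used in evaluate() above trades a little extra work for far fewer code paths: a repeating input is materialized so that every row index is directly addressable, and the repeating/noNulls flags are restored afterwards. A conceptual sketch, assuming a plain long[] payload (the real TimestampColumnVector and the selected[] mapping are more involved):

    public class FlattenSketch {
      // Conceptual core of ColumnVector.flatten(selectedInUse, sel, n) for a
      // repeating vector; the real method also materializes the isNull array.
      static void flatten(long[] vector, boolean isRepeating, int n) {
        if (isRepeating) {
          long repeated = vector[0];
          for (int i = 0; i < n; i++) {
            vector[i] = repeated;  // every row index becomes valid
          }
        }
        // unFlatten() later restores the isRepeating/noNulls indicators.
      }
    }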

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
new file mode 100644
index 0000000..cd00d3a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+import java.sql.Timestamp;
+
+/**
+ * Compute IF(expr1, expr2, expr3) for 3 input expressions.
+ * The first is always a boolean (LongColumnVector).
+ * The second is a constant value.
+ * The third is a constant value.
+ */
+public class IfExprTimestampScalarScalar extends IfExprTimestampScalarScalarBase {
+
+  private static final long serialVersionUID = 1L;
+
+  public IfExprTimestampScalarScalar(int arg1Column, Timestamp arg2Scalar, Timestamp arg3Scalar,
+      int outputColumn) {
+    super(arg1Column, arg2Scalar, arg3Scalar, outputColumn);
+  }
+
+  public IfExprTimestampScalarScalar() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(3)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("int_family"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
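
In normal operation VectorizationContext instantiates one of these IF variants by matching the descriptors above against the argument shapes; wired by hand, usage looks roughly like the sketch below (assuming the default column-vector constructors allocate default-sized arrays, as the other ColumnVector subclasses do):

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarScalar;

    public class IfExprUsageSketch {
      public static void main(String[] args) {
        // Two-column batch: column 0 is the boolean condition, column 1 the output.
        VectorizedRowBatch batch = new VectorizedRowBatch(2);
        LongColumnVector cond = new LongColumnVector();
        batch.cols[0] = cond;
        batch.cols[1] = new TimestampColumnVector();
        batch.size = 2;
        cond.vector[0] = 1;  // row 0: condition true  -> THEN scalar
        cond.vector[1] = 0;  // row 1: condition false -> ELSE scalar

        IfExprTimestampScalarScalar expr = new IfExprTimestampScalarScalar(
            0, Timestamp.valueOf("2016-04-19 03:12:15"),
            Timestamp.valueOf("1970-01-01 00:00:00"), 1);
        expr.evaluate(batch);  // column 1 now holds the chosen scalar per row
      }
    }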


[20/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere)
HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/130293e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/130293e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/130293e5

Branch: refs/heads/branch-1
Commit: 130293e5645b9029578d96dd2cc7e07bb1f76015
Parents: f42b984
Author: Matt McCline <mm...@hortonworks.com>
Authored: Tue Apr 19 03:12:15 2016 -0700
Committer: Matt McCline <mm...@hortonworks.com>
Committed: Tue Apr 19 03:12:15 2016 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ant/GenVectorCode.java   | 1445 ++++++++-----
 .../hive/common/type/HiveIntervalDayTime.java   |   39 +-
 .../hadoop/hive/common/type/RandomTypeUtil.java |  115 +
 .../org/apache/hive/common/util/DateUtils.java  |   17 -
 .../hive/common/util/IntervalDayTimeUtils.java  |   77 +
 data/files/timestamps.txt                       |   50 +
 .../test/resources/testconfiguration.properties |    4 +
 .../DTIColumnArithmeticDTIColumnNoConvert.txt   |    2 +
 .../DTIColumnArithmeticDTIScalarNoConvert.txt   |    2 +
 .../DTIColumnCompareScalar.txt                  |    2 +
 .../DTIScalarArithmeticDTIColumnNoConvert.txt   |    2 +
 .../DTIScalarCompareColumn.txt                  |    2 +
 ...eColumnArithmeticIntervalYearMonthColumn.txt |  197 ++
 ...eColumnArithmeticIntervalYearMonthScalar.txt |  156 ++
 .../DateColumnArithmeticTimestampColumn.txt     |  186 ++
 .../DateColumnArithmeticTimestampScalar.txt     |  154 ++
 ...eScalarArithmeticIntervalYearMonthColumn.txt |  170 ++
 .../DateScalarArithmeticTimestampColumn.txt     |  161 ++
 ...olumnArithmeticIntervalColumnWithConvert.txt |  175 --
 ...olumnArithmeticIntervalScalarWithConvert.txt |  152 --
 ...calarArithmeticIntervalColumnWithConvert.txt |  165 --
 .../FilterDTIColumnCompareScalar.txt            |    2 -
 .../FilterDecimalColumnCompareColumn.txt        |  445 ----
 .../FilterDecimalColumnCompareDecimalColumn.txt |  445 ++++
 .../FilterDecimalColumnCompareDecimalScalar.txt |  160 ++
 .../FilterDecimalColumnCompareScalar.txt        |  160 --
 .../FilterDecimalScalarCompareColumn.txt        |  160 --
 .../FilterDecimalScalarCompareDecimalColumn.txt |  160 ++
 ...erLongDoubleColumnCompareTimestampColumn.txt |  185 ++
 ...erLongDoubleColumnCompareTimestampScalar.txt |   59 +
 ...erLongDoubleScalarCompareTimestampColumn.txt |  169 ++
 .../FilterScalarCompareTimestampColumn.txt      |   55 -
 .../FilterTimestampColumnBetween.txt            |  171 ++
 ...erTimestampColumnCompareLongDoubleColumn.txt |  182 ++
 ...erTimestampColumnCompareLongDoubleScalar.txt |  165 ++
 .../FilterTimestampColumnCompareScalar.txt      |   56 -
 ...terTimestampColumnCompareTimestampColumn.txt |  451 ++++
 ...terTimestampColumnCompareTimestampScalar.txt |  132 +-
 ...erTimestampScalarCompareLongDoubleColumn.txt |   58 +
 ...terTimestampScalarCompareTimestampColumn.txt |  133 +-
 ...olumnArithmeticDateTimeColumnWithConvert.txt |  177 --
 ...olumnArithmeticDateTimeScalarWithConvert.txt |  154 --
 ...calarArithmeticDateTimeColumnWithConvert.txt |  167 --
 ...ervalYearMonthColumnArithmeticDateColumn.txt |  196 ++
 ...ervalYearMonthColumnArithmeticDateScalar.txt |  156 ++
 ...YearMonthColumnArithmeticTimestampColumn.txt |  186 ++
 ...YearMonthColumnArithmeticTimestampScalar.txt |  154 ++
 ...ervalYearMonthScalarArithmeticDateColumn.txt |  170 ++
 ...YearMonthScalarArithmeticTimestampColumn.txt |  158 ++
 .../LongDoubleColumnCompareTimestampColumn.txt  |  154 ++
 .../LongDoubleColumnCompareTimestampScalar.txt  |  146 ++
 .../LongDoubleScalarCompareTimestampColumn.txt  |  144 ++
 .../ScalarCompareTimestampColumn.txt            |   63 -
 .../TimestampColumnArithmeticDateColumn.txt     |  187 ++
 .../TimestampColumnArithmeticDateScalar.txt     |  147 ++
 ...pColumnArithmeticIntervalYearMonthColumn.txt |  186 ++
 ...pColumnArithmeticIntervalYearMonthScalar.txt |  143 ++
 ...TimestampColumnArithmeticTimestampColumn.txt |  177 ++
 ...TimestampColumnArithmeticTimestampScalar.txt |  145 ++
 .../TimestampColumnCompareLongDoubleColumn.txt  |  153 ++
 .../TimestampColumnCompareLongDoubleScalar.txt  |  144 ++
 .../TimestampColumnCompareScalar.txt            |   56 -
 .../TimestampColumnCompareTimestampColumn.txt   |  158 ++
 .../TimestampColumnCompareTimestampScalar.txt   |  118 +-
 .../TimestampScalarArithmeticDateColumn.txt     |  168 ++
 ...pScalarArithmeticIntervalYearMonthColumn.txt |  167 ++
 ...TimestampScalarArithmeticTimestampColumn.txt |  158 ++
 .../TimestampScalarCompareLongDoubleColumn.txt  |   58 +
 .../TimestampScalarCompareTimestampColumn.txt   |  119 +-
 ...runcStringScalarCompareStringGroupColumn.txt |    2 +-
 .../VectorUDAFMinMaxIntervalDayTime.txt         |  454 ++++
 .../UDAFTemplates/VectorUDAFMinMaxTimestamp.txt |  456 ++++
 .../hive/ql/exec/vector/ColumnVector.java       |    3 +-
 .../vector/IntervalDayTimeColumnVector.java     |  348 +++
 .../ql/exec/vector/TimestampColumnVector.java   |  395 ++++
 .../hive/ql/exec/vector/TimestampUtils.java     |   52 +-
 .../hive/ql/exec/vector/VectorAssignRow.java    |   53 +-
 .../exec/vector/VectorColumnAssignFactory.java  |   38 +-
 .../ql/exec/vector/VectorColumnSetInfo.java     |   85 +-
 .../hive/ql/exec/vector/VectorCopyRow.java      |  113 +-
 .../ql/exec/vector/VectorDeserializeRow.java    |   33 +-
 .../exec/vector/VectorExpressionDescriptor.java |   24 +-
 .../hive/ql/exec/vector/VectorExtractRow.java   |   56 +-
 .../ql/exec/vector/VectorGroupKeyHelper.java    |   26 +-
 .../ql/exec/vector/VectorHashKeyWrapper.java    |  109 +-
 .../exec/vector/VectorHashKeyWrapperBatch.java  |  259 ++-
 .../hive/ql/exec/vector/VectorSerializeRow.java |   22 +-
 .../exec/vector/VectorSerializeRowNoNulls.java  |   13 +-
 .../ql/exec/vector/VectorizationContext.java    |  104 +-
 .../ql/exec/vector/VectorizedBatchUtil.java     |   39 +-
 .../ql/exec/vector/VectorizedRowBatchCtx.java   |   18 +-
 .../expressions/CastDecimalToTimestamp.java     |   19 +-
 .../expressions/CastDoubleToTimestamp.java      |  126 ++
 .../exec/vector/expressions/CastLongToDate.java |   58 +-
 .../vector/expressions/CastLongToTimestamp.java |  126 ++
 .../CastMillisecondsLongToTimestamp.java        |  126 ++
 .../CastStringToIntervalDayTime.java            |    9 +-
 .../expressions/CastTimestampToBoolean.java     |  138 ++
 .../vector/expressions/CastTimestampToDate.java |   49 +
 .../expressions/CastTimestampToDecimal.java     |   15 +-
 .../expressions/CastTimestampToDouble.java      |  131 ++
 .../vector/expressions/CastTimestampToLong.java |  133 ++
 .../expressions/ConstantVectorExpression.java   |  106 +-
 .../expressions/DateColSubtractDateColumn.java  |  185 ++
 .../expressions/DateColSubtractDateScalar.java  |  147 ++
 .../DateScalarSubtractDateColumn.java           |  150 ++
 .../FilterTimestampColumnInList.java            |  172 ++
 .../expressions/FuncDecimalToTimestamp.java     |  135 ++
 .../expressions/FuncTimestampToDecimal.java     |  144 ++
 .../vector/expressions/FuncTimestampToLong.java |  145 ++
 .../vector/expressions/ITimestampInExpr.java    |   25 +
 .../IfExprIntervalDayTimeColumnColumn.java      |  153 ++
 .../IfExprIntervalDayTimeColumnScalar.java      |  147 ++
 .../IfExprIntervalDayTimeScalarColumn.java      |  149 ++
 .../IfExprIntervalDayTimeScalarScalar.java      |  137 ++
 .../IfExprTimestampColumnColumn.java            |   54 +
 .../IfExprTimestampColumnColumnBase.java        |  136 ++
 .../IfExprTimestampColumnScalar.java            |   59 +
 .../IfExprTimestampColumnScalarBase.java        |  133 ++
 .../IfExprTimestampScalarColumn.java            |   59 +
 .../IfExprTimestampScalarColumnBase.java        |  134 ++
 .../IfExprTimestampScalarScalar.java            |   59 +
 .../IfExprTimestampScalarScalarBase.java        |  120 ++
 .../ql/exec/vector/expressions/NullUtil.java    |   84 +-
 .../expressions/TimestampColumnInList.java      |  153 ++
 .../expressions/TimestampToStringUnaryUDF.java  |  147 ++
 .../vector/expressions/VectorExpression.java    |    2 +
 .../expressions/VectorExpressionWriter.java     |   10 +-
 .../VectorExpressionWriterFactory.java          |  275 ++-
 .../expressions/VectorUDFDateAddColCol.java     |    5 +-
 .../expressions/VectorUDFDateAddColScalar.java  |   10 +-
 .../expressions/VectorUDFDateAddScalarCol.java  |   13 +-
 .../expressions/VectorUDFDateDiffColCol.java    |   93 +-
 .../expressions/VectorUDFDateDiffColScalar.java |   26 +-
 .../expressions/VectorUDFDateDiffScalarCol.java |   28 +-
 .../vector/expressions/VectorUDFDateLong.java   |    5 +-
 .../expressions/VectorUDFDateTimestamp.java     |   74 +
 .../expressions/VectorUDFDayOfMonthDate.java    |   38 +
 .../expressions/VectorUDFDayOfMonthLong.java    |   38 -
 .../VectorUDFDayOfMonthTimestamp.java           |   38 +
 .../vector/expressions/VectorUDFHourDate.java   |   39 +
 .../vector/expressions/VectorUDFHourLong.java   |   39 -
 .../expressions/VectorUDFHourTimestamp.java     |   39 +
 .../vector/expressions/VectorUDFMinuteDate.java |   38 +
 .../vector/expressions/VectorUDFMinuteLong.java |   38 -
 .../expressions/VectorUDFMinuteTimestamp.java   |   38 +
 .../vector/expressions/VectorUDFMonthDate.java  |   44 +
 .../vector/expressions/VectorUDFMonthLong.java  |   50 -
 .../expressions/VectorUDFMonthTimestamp.java    |   46 +
 .../vector/expressions/VectorUDFSecondDate.java |   38 +
 .../vector/expressions/VectorUDFSecondLong.java |   38 -
 .../expressions/VectorUDFSecondTimestamp.java   |   38 +
 .../VectorUDFTimestampFieldDate.java            |  163 ++
 .../VectorUDFTimestampFieldLong.java            |  228 --
 .../VectorUDFTimestampFieldString.java          |    5 +-
 .../VectorUDFTimestampFieldTimestamp.java       |  162 ++
 .../expressions/VectorUDFUnixTimeStampDate.java |   49 +
 .../expressions/VectorUDFUnixTimeStampLong.java |   57 -
 .../VectorUDFUnixTimeStampTimestamp.java        |   45 +
 .../expressions/VectorUDFWeekOfYearDate.java    |   46 +
 .../expressions/VectorUDFWeekOfYearLong.java    |   46 -
 .../VectorUDFWeekOfYearTimestamp.java           |   46 +
 .../vector/expressions/VectorUDFYearDate.java   |   38 +
 .../vector/expressions/VectorUDFYearLong.java   |   67 -
 .../expressions/VectorUDFYearTimestamp.java     |   38 +
 .../aggregates/VectorUDAFAvgTimestamp.java      |  482 +++++
 .../aggregates/VectorUDAFStdPopTimestamp.java   |  527 +++++
 .../aggregates/VectorUDAFStdSampTimestamp.java  |  527 +++++
 .../aggregates/VectorUDAFVarPopTimestamp.java   |  527 +++++
 .../aggregates/VectorUDAFVarSampTimestamp.java  |  527 +++++
 .../ql/exec/vector/udf/VectorUDFAdaptor.java    |   13 +-
 .../hive/ql/io/orc/TreeReaderFactory.java       |   18 +-
 .../hadoop/hive/ql/io/orc/TypeDescription.java  |    4 +-
 .../hadoop/hive/ql/udf/UDFDayOfMonth.java       |    5 +-
 .../org/apache/hadoop/hive/ql/udf/UDFHour.java  |    5 +-
 .../apache/hadoop/hive/ql/udf/UDFMinute.java    |    5 +-
 .../org/apache/hadoop/hive/ql/udf/UDFMonth.java |    7 +-
 .../apache/hadoop/hive/ql/udf/UDFSecond.java    |    5 +-
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToByte.java    |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToDouble.java  |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToFloat.java   |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToInteger.java |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToLong.java    |    4 +-
 .../apache/hadoop/hive/ql/udf/UDFToShort.java   |    4 +-
 .../hadoop/hive/ql/udf/UDFWeekOfYear.java       |    5 +-
 .../org/apache/hadoop/hive/ql/udf/UDFYear.java  |    5 +-
 .../hive/ql/udf/generic/GenericUDFDate.java     |    3 +-
 .../hive/ql/udf/generic/GenericUDFIf.java       |   18 +-
 .../hive/ql/udf/generic/GenericUDFOPEqual.java  |   32 +-
 .../generic/GenericUDFOPEqualOrGreaterThan.java |   32 +-
 .../generic/GenericUDFOPEqualOrLessThan.java    |   31 +-
 .../ql/udf/generic/GenericUDFOPGreaterThan.java |   31 +-
 .../ql/udf/generic/GenericUDFOPLessThan.java    |   31 +-
 .../hive/ql/udf/generic/GenericUDFOPMinus.java  |    3 +
 .../ql/udf/generic/GenericUDFOPNotEqual.java    |   34 +-
 .../ql/udf/generic/GenericUDFTimestamp.java     |    8 +-
 .../hive/ql/udf/generic/GenericUDFToDate.java   |    3 +-
 .../udf/generic/GenericUDFToUnixTimeStamp.java  |   11 +-
 .../hadoop/hive/ql/util/DateTimeMath.java       |  193 +-
 .../ql/exec/vector/RandomRowObjectSource.java   |   21 +-
 .../TestTimestampWritableAndColumnVector.java   |   68 +
 .../exec/vector/TestVectorizationContext.java   |   38 +-
 .../TestConstantVectorExpression.java           |    4 +-
 .../expressions/TestVectorDateExpressions.java  |   10 +-
 .../TestVectorExpressionWriters.java            |  136 +-
 .../TestVectorFilterExpressions.java            |   48 +-
 .../TestVectorGenericDateExpressions.java       |   22 +-
 .../expressions/TestVectorMathFunctions.java    |   87 +
 .../TestVectorTimestampExpressions.java         |  364 ++--
 .../vector/expressions/TestVectorTypeCasts.java |  221 +-
 .../FakeVectorRowBatchFromObjectIterables.java  |    8 +-
 .../vector/util/VectorizedRowGroupGenUtil.java  |   35 +-
 .../hive/ql/io/orc/TestInputOutputFormat.java   |    7 +-
 .../hadoop/hive/ql/io/orc/TestOrcFile.java      |    5 +-
 .../hadoop/hive/ql/io/orc/TestOrcFile.java.orig | 2034 ++++++++++++++++++
 .../hive/ql/io/orc/TestVectorizedORCReader.java |   10 +-
 .../clientpositive/vector_interval_arithmetic.q |  174 ++
 .../clientpositive/vectorized_timestamp.q       |   27 +
 .../tez/vector_interval_arithmetic.q.out        | 1078 ++++++++++
 .../tez/vector_join_part_col_char.q.out         |  172 +-
 .../tez/vectorized_timestamp.q.out              |  258 +++
 .../vector_interval_arithmetic.q.out            | 1026 +++++++++
 .../results/clientpositive/vector_udf1.q.out    |  113 +-
 .../clientpositive/vectorized_casts.q.out       |   18 +-
 .../clientpositive/vectorized_timestamp.q.out   |  239 ++
 .../fast/BinarySortableSerializeWrite.java      |   13 -
 .../hadoop/hive/serde2/fast/SerializeWrite.java |    3 -
 .../hive/serde2/io/TimestampWritable.java       |   88 +-
 .../lazy/fast/LazySimpleSerializeWrite.java     |   20 -
 .../fast/LazyBinarySerializeWrite.java          |   36 -
 231 files changed, 24767 insertions(+), 4361 deletions(-)
----------------------------------------------------------------------



[16/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleScalar.txt
new file mode 100644
index 0000000..31c3f6b
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareLongDoubleScalar.txt
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterTimestampColumnCompareLongDoubleScalar.txt, which covers comparison
+ * expressions between a timestamp column and a long/double scalar, however output is not produced
+ * in a separate column. The selected vector of the input {@link VectorizedRowBatch} is updated
+ * for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType> value;
+
+  public <ClassName>(int colNum, <OperandType> value) {
+    this.colNum = colNum;
+    this.value = value;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[colNum];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.noNulls) {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!(inputColVector.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> value)) {
+          //Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(inputColVector.<GetTimestampLongDoubleMethod>(0) <OperatorSymbol> value)) {
+            //Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+           if (inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value) {
+             sel[newSize++] = i;
+           }
+          }
+        }
+        //Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (inputColVector.<GetTimestampLongDoubleMethod>(i) <OperatorSymbol> value) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
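
All of these filter templates share one contract: instead of producing a boolean output column, they compact the surviving row indices into batch.selected and shrink batch.size in place. A stripped-down sketch of that contract with hypothetical names (long values standing in for the timestamp accessor calls):

    public class FilterContractSketch {
      // Survivors are compacted into sel[]; the caller stores the returned
      // count into batch.size and sets batch.selectedInUse on the first filter.
      static int filterGreaterThan(long[] values, int[] sel, boolean selectedInUse,
          int n, long threshold) {
        int newSize = 0;
        if (selectedInUse) {
          for (int j = 0; j != n; j++) {
            int i = sel[j];
            if (values[i] > threshold) {
              sel[newSize++] = i;  // keep this already-selected row
            }
          }
        } else {
          for (int i = 0; i != n; i++) {
            if (values[i] > threshold) {
              sel[newSize++] = i;  // first filter in the chain populates sel[]
            }
          }
        }
        return newSize;
      }
    }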

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareScalar.txt
deleted file mode 100644
index 0c37b4d..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareScalar.txt
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
-
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * Generated from template FilterTimestampColumnCompareScalar.txt, which covers comparison 
- * expressions between a timestamp column and a long or double scalar, however output is not
- * produced in a separate column. 
- * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
- * Note: For timestamp and long or double we implicitly interpret the long as the number
- * of seconds or double as seconds and fraction since the epoch.
- */
-public class <ClassName> extends <BaseClassName> {
-
-  public <ClassName>(int colNum, <OperandType> value) { 
-    super(colNum, TimestampUtils.<TimestampScalarConversion>(value));
-  }
-
-  public <ClassName>() {
-    super();
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt
new file mode 100644
index 0000000..31dce1c
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampColumn.txt
@@ -0,0 +1,451 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+/**
+ * Generated from template FilterTimestampColumnCompareTimestampColumn.txt, which covers binary
+ * comparison filter expressions between two timestamp columns. Output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+     // Input #1 is type <OperandType>.
+    <InputColumnVectorType> inputColVector1 = (<InputColumnVectorType>) batch.cols[colNum1];
+
+     // Input #2 is type <OperandType>.
+    <InputColumnVectorType> inputColVector2 = (<InputColumnVectorType>) batch.cols[colNum2];
+
+    int[] sel = batch.selected;
+    boolean[] nullPos1 = inputColVector1.isNull;
+    boolean[] nullPos2 = inputColVector2.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // handle case where neither input has nulls
+    if (inputColVector1.noNulls && inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+
+        /* Either all must remain selected or all will be eliminated.
+         * Repeating property will not change.
+         */
+        if (!(inputColVector1.compareTo(0, inputColVector2, 0) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+
+    // handle case where only input 2 has nulls
+    } else if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos2[0] ||
+            !(inputColVector1.compareTo(0, inputColVector2, 0) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+
+         // no need to check for nulls in input 1
+         if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+
+          // no values will qualify because every comparison will be with NULL
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where only input 1 has nulls
+    } else if (inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] ||
+            !(inputColVector1.compareTo(0, inputColVector2, 0) <OperatorSymbol> 0)) {
+          batch.size = 0;
+          return;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (nullPos1[0]) {
+
+          // if repeating value is null then every comparison will fail so nothing qualifies
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+         if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+         if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where both inputs have nulls
+    } else {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] || nullPos2[0] ||
+            !(inputColVector1.compareTo(0, inputColVector2, 0) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+         if (nullPos1[0]) {
+           batch.size = 0;
+           return;
+         }
+         if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (inputColVector1.compareTo(0, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, 0) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+         if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (inputColVector1.compareTo(i, inputColVector2, i) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
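
The four branches above (no nulls, nulls only in input 2, nulls only in input 1, nulls in both) all reduce to one row-level rule, restated here as a hedged sketch for the ">" instantiation of <OperatorSymbol>:

    public class NullCaseSketch {
      // cmp stands for inputColVector1.compareTo(i, inputColVector2, i).
      static boolean rowQualifies(boolean null1, boolean null2, int cmp) {
        if (null1 || null2) {
          return false;  // any null comparand disqualifies the row
        }
        return cmp > 0;  // the comparison itself decides for non-null rows
      }
    }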

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
index d13fecf..bab8508 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
@@ -15,27 +15,135 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
- * Generated from template FilterTimestampColumnCompareTimestampScalar.txt, which covers comparison 
- * expressions between a timestamp column and a timestamp scalar, however output is not
- * produced in a separate column. 
+ * Generated from template FilterTimestampColumnCompareTimestampScalar.txt, which covers comparison
+ * expressions between a timestamp column and a timestamp scalar, however output is not produced in a separate column.
  * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType> value;
 
-  public <ClassName>(int colNum, long value) { 
-    super(colNum, value);
+  public <ClassName>(int colNum, <HiveOperandType> value) {
+    this.colNum = colNum;
+    this.value = value;
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+     // Input #1 is type <OperandType>.
+    <InputColumnVectorType> inputColVector1 = (<InputColumnVectorType>) batch.cols[colNum];
+
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector1.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!(inputColVector1.compareTo(0, value) <OperatorSymbol> 0)) {
+          //Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (inputColVector1.compareTo(i, value) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inputColVector1.compareTo(i, value) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector1.isRepeating) {
+        //All must be selected otherwise size would be zero
+        //Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(inputColVector1.compareTo(0, value) <OperatorSymbol> 0)) {
+            //Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+           if (inputColVector1.compareTo(i, value) <OperatorSymbol> 0) {
+             sel[newSize++] = i;
+           }
+          }
+        }
+        //Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (inputColVector1.compareTo(i, value) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
   }
 
   @Override
@@ -45,10 +153,10 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.FILTER)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
   }
-}
\ No newline at end of file
+}
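
Note the shift in this rewrite: instead of converting the scalar once through the removed TimestampUtils path and comparing longs, the template now delegates each row to TimestampColumnVector.compareTo(i, value). A hypothetical per-row expansion of that call, assuming millisecond/nanosecond storage (java.sql.Timestamp orders by milliseconds first, then nano-of-second):

    import java.sql.Timestamp;

    public class ScalarCompareSketch {
      // Hypothetical expansion of inputColVector1.compareTo(i, value); the
      // actual field layout of TimestampColumnVector may differ.
      static int compareRowToScalar(long rowMillis, int rowNanos, Timestamp value) {
        int c = Long.compare(rowMillis, value.getTime());
        return c != 0 ? c : Integer.compare(rowNanos, value.getNanos());
      }
    }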

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt
new file mode 100644
index 0000000..5e418de
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareLongDoubleColumn.txt
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterTimestampScalarCompareLongDoubleColumn.txt, which covers binary
+ * comparison expressions between a timestamp scalar and a long/double column, however output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  private static final long serialVersionUID = 1L;
+
+  public <ClassName>(Timestamp value, int colNum) {
+    super(TimestampColumnVector.<GetTimestampLongDoubleMethod>(value), colNum);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
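
The constructor above funnels the Timestamp scalar through
TimestampColumnVector.<GetTimestampLongDoubleMethod>(value) so it can be
compared against a long or double column. A plausible shape for that pair of
conversions, sketched in plain Java (the exact method names and rounding here
are assumptions, not taken from this patch):

import java.sql.Timestamp;

public final class TimestampToLongDoubleSketch {

  // For long comparisons: whole epoch seconds (floorDiv keeps pre-1970
  // timestamps on the correct second).
  static long timestampAsLong(Timestamp ts) {
    return Math.floorDiv(ts.getTime(), 1000L);
  }

  // For double comparisons: epoch seconds plus the full nanosecond fraction.
  static double timestampAsDouble(Timestamp ts) {
    return Math.floorDiv(ts.getTime(), 1000L) + ts.getNanos() / 1e9;
  }
}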

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
index a37db3d..ff5d11e 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
@@ -15,26 +15,137 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
- 
+
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 
 /**
- * Generated from template FilterTimestampScalarCompareTimestampColumn.txt, which covers comparison 
- * expressions between a timestamp scalar and a column, however output is not produced in a separate column. 
- * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ * This is a generated class to evaluate a <OperatorSymbol> comparison on a vector of timestamp
+ * values.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <HiveOperandType> value;
 
-  public <ClassName>(long value, int colNum) { 
-    super(value, colNum);
+  public <ClassName>(<HiveOperandType> value, int colNum) {
+    this.colNum = colNum;
+    this.value = value;
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+    // Input #2 is type <OperandType>.
+    <InputColumnVectorType> inputColVector2 = (<InputColumnVectorType>) batch.cols[colNum];
+
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector2.isNull;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.noNulls) {
+      if (inputColVector2.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!(inputColVector2.compareTo(value, 0) <OperatorSymbol> 0)) {
+
+          // Entire batch is filtered out.
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (inputColVector2.compareTo(value, i) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (inputColVector2.compareTo(value, i) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    } else {
+      if (inputColVector2.isRepeating) {
+
+        // All must be selected otherwise size would be zero. Repeating property will not change.
+        if (!nullPos[0]) {
+          if (!(inputColVector2.compareTo(value, 0) <OperatorSymbol> 0)) {
+
+            // Entire batch is filtered out.
+            batch.size = 0;
+          }
+        } else {
+          batch.size = 0;
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (!nullPos[i]) {
+            if (inputColVector2.compareTo(value, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+
+        // Change the selected vector
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (!nullPos[i]) {
+            if (inputColVector2.compareTo(value, i) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+        }
+        if (newSize < n) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
   }
 
   @Override
@@ -44,8 +155,8 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.FILTER)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"),
-            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.SCALAR,
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
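
The branches with nulls above differ from the noNulls branches only by a
per-row guard on the isNull flag. The selectedInUse case, reduced to plain
Java (illustrative names, not part of the patch):

public final class NullAwareFilterSketch {

  /** Rows whose isNull flag is set never pass the predicate. */
  public static int filterNotNullGreaterThan(long[] values, boolean[] isNull,
      long threshold, int[] sel, int size) {
    int newSize = 0;
    for (int j = 0; j != size; j++) {
      int i = sel[j];
      if (!isNull[i] && values[i] > threshold) {
        sel[newSize++] = i;   // caller stores newSize into batch.size
      }
    }
    return newSize;
  }
}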

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeColumnWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeColumnWithConvert.txt
deleted file mode 100644
index c182557..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeColumnWithConvert.txt
+++ /dev/null
@@ -1,177 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template IntervalColumnArithmeticDateTimeColumnWithConvert.txt, which covers binary arithmetic 
- * expressions between columns.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-  
-  private int colNum1;
-  private int colNum2;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
-    this.colNum1 = colNum1;
-    this.colNum2 = colNum2;
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
-    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    int n = batch.size;
-    <VectorOperandType1>[] vector1 = inputColVector1.vector;
-    <VectorOperandType2>[] vector2 = inputColVector2.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-
-    // arg1 is interval type, arg2 is datetime type
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-    
-    outputColVector.isRepeating = 
-         inputColVector1.isRepeating && inputColVector2.isRepeating
-      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
-      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
-    
-    // Handle nulls first  
-    NullUtil.propagateNullsColCol(
-      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
-          
-    /* Disregard nulls for processing. In other words,
-     * the arithmetic operation is performed even if one or 
-     * more inputs are null. This is to improve speed by avoiding
-     * conditional checks in the inner loop.
-     */ 
-    if (inputColVector1.isRepeating && inputColVector2.isRepeating) { 
-      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector2[0]), <OperatorSymbol> (int) vector1[0]);
-    } else if (inputColVector1.isRepeating) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[0]), <OperatorSymbol> (int) vector1[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[0]), <OperatorSymbol> (int) vector1[i]);
-        }
-      }
-    } else if (inputColVector2.isRepeating) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[i]), <OperatorSymbol> (int) vector1[0]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[i]), <OperatorSymbol> (int) vector1[0]);
-        }
-      }
-    } else {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[i]), <OperatorSymbol> (int) vector1[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector2[i]), <OperatorSymbol> (int) vector1[i]);
-        }
-      }
-    }
-    
-    /* For the case when the output can have null values, follow 
-     * the convention that the data values must be 1 for long and 
-     * NaN for double. This is to prevent possible later zero-divide errors
-     * in complex arithmetic expressions like col2 / (col1 - 1)
-     * in the case when some col1 entries are null.
-     */
-    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum1() {
-    return colNum1;
-  }
-
-  public void setColNum1(int colNum1) {
-    this.colNum1 = colNum1;
-  }
-
-  public int getColNum2() {
-    return colNum2;
-  }
-
-  public void setColNum2(int colNum2) {
-    this.colNum2 = colNum2;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-  
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}
-
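
The removed *WithConvert templates routed datetime values through epoch
milliseconds. A standalone illustration of the precision ceiling of that
representation (the connection to this removal is an inference from the
surrounding patch, not something the diff states):

import java.sql.Timestamp;

public final class MillisRoundTripSketch {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("2016-04-19 12:12:55.123456789");
    long millis = ts.getTime();              // fraction truncated to millis
    Timestamp back = new Timestamp(millis);
    System.out.println(ts.getNanos());       // 123456789
    System.out.println(back.getNanos());     // 123000000 (sub-millisecond
                                             // nanos are lost)
  }
}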

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt
deleted file mode 100644
index 8fa3563..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
-import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template IntervalColumnArithmeticDateTimeScalarWithConvert.txt, which covers binary arithmetic 
- * expressions between a column and a scalar.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private <VectorOperandType2> value;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(int colNum, <VectorOperandType2> value, int outputColumn) {
-    this.colNum = colNum;
-    this.value = <TypeConversionToMillis>(value);
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
-    boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
-    int n = batch.size;
-    <VectorOperandType1>[] vector = inputColVector.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    // arg1 is interval, arg2 is datetime
-
-    if (inputColVector.isRepeating) {
-      outputVector[0] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[0]);
-      
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
-    } else if (inputColVector.noNulls) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-      }
-    } else /* there are nulls */ {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-          outputIsNull[i] = inputIsNull[i];
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(value, <OperatorSymbol> (int) vector[i]);
-        }
-        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
-      }
-    }
-    
-    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-  
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum() {
-    return colNum;
-  }
-  
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public <VectorOperandType2> getValue() {
-    return value;
-  }
-
-  public void setValue(<VectorOperandType2> value) {
-    this.value = value;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt
deleted file mode 100644
index 0464a5e..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt
+++ /dev/null
@@ -1,167 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-
-
-/*
- * Because of the templatized nature of the code, either or both
- * of these ColumnVector imports may be needed. Listing both of them
- * rather than using ....vectorization.*;
- */
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.util.DateTimeMath;
-
-/**
- * Generated from template IntervalScalarArithmeticDateTimeColumnWithConvert.txt.
- * Implements a vectorized arithmetic operator with a scalar on the left and a
- * column vector on the right. The result is output to an output column vector.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private <VectorOperandType1> value;
-  private int outputColumn;
-  private DateTimeMath dtm = new DateTimeMath();
-
-  public <ClassName>(<VectorOperandType1> value, int colNum, int outputColumn) {
-    this.colNum = colNum;
-    this.value = value;
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  /**
-   * Method to evaluate scalar-column operation in vectorized fashion.
-   *
-   * @batch a package of rows with each column stored in a vector
-   */
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
-    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
-    boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
-    int n = batch.size;
-    <VectorOperandType2>[] vector = inputColVector.vector;
-    <VectorReturnType>[] outputVector = outputColVector.vector;
-    
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    // arg1 is interval, arg2 is datetime
-
-    if (inputColVector.isRepeating) {
-      outputVector[0] = <OperatorFunction>(<TypeConversionToMillis>(vector[0]), <OperatorSymbol> (int) value);
-      
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
-    } else if (inputColVector.noNulls) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-      }
-    } else {                         /* there are nulls */ 
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-          outputIsNull[i] = inputIsNull[i];
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputVector[i] = <OperatorFunction>(<TypeConversionToMillis>(vector[i]), <OperatorSymbol> (int) value);
-        }
-        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
-      }
-    }
-    
-    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-  
-  @Override
-  public String getOutputType() {
-    return "<VectorReturnType>";
-  }
-  
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public <VectorOperandType1> getValue() {
-    return value;
-  }
-
-  public void setValue(<VectorOperandType1> value) {
-    this.value = value;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
new file mode 100644
index 0000000..8e3a419
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template IntervalYearMonthColumnArithmeticDateColumn.txt, which covers binary
+ * arithmetic expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth1;
+  private Date scratchDate2;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth1 = new HiveIntervalYearMonth();
+    scratchDate2 = new Date(0);
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type interval_year_month.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type date.
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
+
+    // Output is type date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] vector2 = inputColVector2.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = 
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first  
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.<OperatorMethod>(
+          scratchIntervalYearMonth1, scratchDate2, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+    } else if (inputColVector1.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, scratchDate2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesLong(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
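
dtm.<OperatorMethod> above binds to a DateTimeMath add or subtract. Assuming
it performs plain calendar-month arithmetic (an assumption; DateTimeMath
itself is not shown in this diff), the core of date +/- interval_year_month
can be sketched with the JDK alone:

import java.sql.Date;
import java.util.Calendar;

public final class DateIntervalYearMonthSketch {

  public static Date addMonths(Date d, int totalMonths) {
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(d.getTime());
    // Rolls years and clamps the day-of-month, e.g. 2016-01-31 plus one
    // month yields 2016-02-29.
    cal.add(Calendar.MONTH, totalMonths);
    return new Date(cal.getTimeInMillis());
  }
}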

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
new file mode 100644
index 0000000..ad65d52
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+/**
+ * Generated from template IntervalYearMonthColumnArithmeticDateScalar.txt, which covers binary
+ * arithmetic expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Date value;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth1;
+  private Date outputDate;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = new Date(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth1 = new HiveIntervalYearMonth();
+    outputDate = new Date(0);
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type interval_year_month (months).
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type date.
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+    long[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      dtm.<OperatorMethod>(
+          scratchIntervalYearMonth1, value, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "long";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
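
The scalar constructor above converts the long day count to java.sql.Date via
DateWritable.daysToMillis, and results travel back through
DateWritable.dateToDays. A UTC-only approximation of that pair (the real
DateWritable also compensates for the JVM's local time zone, which this
sketch deliberately omits):

public final class DaysMillisSketch {
  static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;

  static long daysToMillis(int epochDays) {
    return epochDays * MILLIS_PER_DAY;   // midnight UTC of that day
  }

  static int millisToDays(long epochMillis) {
    // floorDiv keeps pre-1970 dates on the correct day.
    return (int) Math.floorDiv(epochMillis, MILLIS_PER_DAY);
  }
}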

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt
new file mode 100644
index 0000000..858c3d7
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampColumn.txt
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template IntervalYearMonthColumnArithmeticTimestampColumn.txt, which covers
+ * binary arithmetic expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth1 = new HiveIntervalYearMonth();
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type interval_year_month.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type Timestamp.
+    TimestampColumnVector inputColVector2 = (TimestampColumnVector) batch.cols[colNum2];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = 
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      dtm.<OperatorMethod>(
+          scratchIntervalYearMonth1, inputColVector2.asScratchTimestamp(0), outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+    } else if (inputColVector1.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      Timestamp value2 = inputColVector2.asScratchTimestamp(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value2, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, inputColVector2.asScratchTimestamp(i), outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntriesTimestamp(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
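
asScratchTimestamp, getScratchTimestamp and setFromScratchTimestamp above all
exist so that no Timestamp object is allocated per row. The pattern in
isolation (a sketch; the class and method names are illustrative):

import java.sql.Timestamp;

public final class ScratchTimestampSketch {
  private final Timestamp scratch = new Timestamp(0);  // reused for every row

  // Mutates and returns the shared holder; callers must copy the value if
  // they need to keep it beyond the current row.
  Timestamp asScratchTimestamp(long epochSeconds, int nanos) {
    scratch.setTime(epochSeconds * 1000L);  // whole seconds first
    scratch.setNanos(nanos);                // then the full fractional part
    return scratch;
  }
}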

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
new file mode 100644
index 0000000..66fffd2
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template IntervalYearMonthColumnArithmeticTimestampScalar.txt, which covers
+ * binary arithmetic expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, Timestamp value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+    scratchIntervalYearMonth1 = new HiveIntervalYearMonth();
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type interval_year_month.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type Timestamp.
+    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchIntervalYearMonth1.set((int) vector1[0]);
+      dtm.<OperatorMethod>(
+          scratchIntervalYearMonth1, value, outputColVector.getScratchTimestamp());
+      outputColVector.setFromScratchTimestamp(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchIntervalYearMonth1.set((int) vector1[i]);
+          dtm.<OperatorMethod>(
+              scratchIntervalYearMonth1, value, outputColVector.getScratchTimestamp());
+          outputColVector.setFromScratchTimestamp(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "timestamp";
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"),
+            VectorExpressionDescriptor.ArgumentType.getType("timestamp"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
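
As with the date variants, the timestamp +/- interval_year_month arithmetic
delegated to dtm.<OperatorMethod> can be approximated in JDK terms: month
arithmetic only touches calendar fields, so the nanos pass through unchanged
(an approximation of the assumed DateTimeMath behavior, not a copy of it):

import java.sql.Timestamp;
import java.util.Calendar;

public final class TimestampIntervalYearMonthSketch {

  public static Timestamp addMonths(Timestamp ts, int totalMonths) {
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(ts.getTime());
    cal.add(Calendar.MONTH, totalMonths);
    Timestamp result = new Timestamp(cal.getTimeInMillis());
    result.setNanos(ts.getNanos());  // month math never changes the fraction
    return result;
  }
}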