Posted to commits@hive.apache.org by vg...@apache.org on 2018/05/15 22:40:48 UTC

[28/50] [abbrv] hive git commit: HIVE-19384: Vectorization: IfExprTimestamp* do not handle NULLs correctly (Matt McCline, reviewed by Teddy Choi)
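For context, the IF-over-timestamp expressions exercised by the regenerated vectorized_timestamp_funcs.q test in this patch have the following shape (copied verbatim from the query added in the diff below, nothing here is new): as the new golden rows show, a NULL or false cboolean1 must select the else branch, and a NULL branch must produce NULL, which is what the vectorized IfExprTimestamp* expressions previously mishandled.

  SELECT
    cboolean1,
    ctimestamp1,
    ctimestamp2,
    if (cboolean1, ctimestamp1, timestamp '1319-02-02 16:31:57.778'),
    if (cboolean1, timestamp '2000-12-18 08:42:30.0005', ctimestamp1),
    if (cboolean1, ctimestamp1, ctimestamp2),
    if (cboolean1, ctimestamp1, null),
    if (cboolean1, null, ctimestamp2)
  FROM alltypesorc_string;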

http://git-wip-us.apache.org/repos/asf/hive/blob/e2d4f347/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 244aca6..01e915b 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -1,15 +1,19 @@
-PREHOOK: query: CREATE TABLE alltypesorc_string(ctimestamp1 timestamp, stimestamp1 string) STORED AS ORC
+PREHOOK: query: CREATE TABLE alltypesorc_string(cboolean1 boolean, ctimestamp1 timestamp, stimestamp1 string,
+    ctimestamp2 timestamp) STORED AS ORC
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@alltypesorc_string
-POSTHOOK: query: CREATE TABLE alltypesorc_string(ctimestamp1 timestamp, stimestamp1 string) STORED AS ORC
+POSTHOOK: query: CREATE TABLE alltypesorc_string(cboolean1 boolean, ctimestamp1 timestamp, stimestamp1 string,
+    ctimestamp2 timestamp) STORED AS ORC
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@alltypesorc_string
 PREHOOK: query: INSERT OVERWRITE TABLE alltypesorc_string
 SELECT
+  cboolean1,
   to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS toutc,
-  CAST(to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS STRING) as cst
+  CAST(to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS STRING) as cst,
+  ctimestamp2
 FROM alltypesorc
 ORDER BY toutc, cst
 LIMIT 40
@@ -18,16 +22,146 @@ PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: default@alltypesorc_string
 POSTHOOK: query: INSERT OVERWRITE TABLE alltypesorc_string
 SELECT
+  cboolean1,
   to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS toutc,
-  CAST(to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS STRING) as cst
+  CAST(to_utc_timestamp(ctimestamp1, 'America/Los_Angeles') AS STRING) as cst,
+  ctimestamp2
 FROM alltypesorc
 ORDER BY toutc, cst
 LIMIT 40
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
 POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: alltypesorc_string.stimestamp1 EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, '2021-09-24 03:18:32.4', '1978-08-05 14:41:05.501', '1999-10-03 16:59:10.396903939')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, '2021-09-24 03:18:32.4', '1978-08-05 14:41:05.501', '1999-10-03 16:59:10.396903939')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, null, '2013-04-10 00:43:46.8547315', null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, null, '2013-04-10 00:43:46.8547315', null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, '2021-09-24 03:18:32.4', null, null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (false, '2021-09-24 03:18:32.4', null, null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 EXPRESSION []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, '7160-12-02 06:00:24.81200852', '0004-09-22 18:26:29.519542222', '1966-08-16 13:36:50.183')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, '7160-12-02 06:00:24.81200852', '0004-09-22 18:26:29.519542222', '1966-08-16 13:36:50.183')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, null, '4966-12-04 09:30:55.202', null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, null, '4966-12-04 09:30:55.202', null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, '7160-12-02 06:00:24.81200852', null, null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (null, '7160-12-02 06:00:24.81200852', null, null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 EXPRESSION []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, '1985-07-20 09:30:11.0', '8521-01-16 20:42:05.668832', '1319-02-02 16:31:57.778')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, '1985-07-20 09:30:11.0', '8521-01-16 20:42:05.668832', '1319-02-02 16:31:57.778')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, null, '1883-04-17 04:14:34.64776', '2024-11-11 16:42:41.101')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, null, '1883-04-17 04:14:34.64776', '2024-11-11 16:42:41.101')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, '0528-10-27 08:15:18.941718273', null, null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values (true, '0528-10-27 08:15:18.941718273', null, null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 EXPRESSION []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 EXPRESSION []
+PREHOOK: query: INSERT INTO TABLE alltypesorc_string values
+     (false, '2021-09-24 03:18:32.4', '1985-11-18 16:37:54.0', '2010-04-08 02:43:35.861742727'),
+     (true, null, '1985-11-18 16:37:54.0', null),
+     (null, '2021-09-24 03:18:32.4', null, '1974-10-04 17:21:03.989')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesorc_string
+POSTHOOK: query: INSERT INTO TABLE alltypesorc_string values
+     (false, '2021-09-24 03:18:32.4', '1985-11-18 16:37:54.0', '2010-04-08 02:43:35.861742727'),
+     (true, null, '1985-11-18 16:37:54.0', null),
+     (null, '2021-09-24 03:18:32.4', null, '1974-10-04 17:21:03.989')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesorc_string
+POSTHOOK: Lineage: alltypesorc_string.cboolean1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp1 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.ctimestamp2 SCRIPT []
+POSTHOOK: Lineage: alltypesorc_string.stimestamp1 SCRIPT []
 PREHOOK: query: CREATE TABLE alltypesorc_wrong(stimestamp1 string) STORED AS ORC
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -72,7 +206,15 @@ PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   weekofyear(ctimestamp1),
   hour(ctimestamp1),
   minute(ctimestamp1),
-  second(ctimestamp1)
+  second(ctimestamp1),
+  cboolean1,
+  ctimestamp1,
+  ctimestamp2,
+  if (cboolean1, ctimestamp1, timestamp '1319-02-02 16:31:57.778'),
+  if (cboolean1, timestamp '2000-12-18 08:42:30.0005', ctimestamp1),
+  if (cboolean1, ctimestamp1, ctimestamp2),
+  if (cboolean1, ctimestamp1, null),
+  if (cboolean1, null, ctimestamp2)
 FROM alltypesorc_string
 ORDER BY c1
 PREHOOK: type: QUERY
@@ -85,7 +227,15 @@ POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   weekofyear(ctimestamp1),
   hour(ctimestamp1),
   minute(ctimestamp1),
-  second(ctimestamp1)
+  second(ctimestamp1),
+  cboolean1,
+  ctimestamp1,
+  ctimestamp2,
+  if (cboolean1, ctimestamp1, timestamp '1319-02-02 16:31:57.778'),
+  if (cboolean1, timestamp '2000-12-18 08:42:30.0005', ctimestamp1),
+  if (cboolean1, ctimestamp1, ctimestamp2),
+  if (cboolean1, ctimestamp1, null),
+  if (cboolean1, null, ctimestamp2)
 FROM alltypesorc_string
 ORDER BY c1
 POSTHOOK: type: QUERY
@@ -103,18 +253,18 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+              expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9, 10, 11]
-                  selectExpressions: VectorUDFUnixTimeStampTimestamp(col 0:timestamp) -> 3:bigint, VectorUDFYearTimestamp(col 0:timestamp, field YEAR) -> 4:int, VectorUDFMonthTimestamp(col 0:timestamp, field MONTH) -> 5:int, VectorUDFDayOfMonthTimestamp(col 0:timestamp, field DAY_OF_MONTH) -> 6:int, VectorUDFDayOfMonthTimestamp(col 0:timestamp, field DAY_OF_MONTH) -> 7:int, VectorUDFWeekOfYearTimestamp(col 0:timestamp, field WEEK_OF_YEAR) -> 8:int, VectorUDFHourTimestamp(col 0:timestamp, field HOUR_OF_DAY) -> 9:int, VectorUDFMinuteTimestamp(col 0:timestamp, field MINUTE) -> 10:int, VectorUDFSecondTimestamp(col 0:timestamp, field SECOND) -> 11:int
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
+                  selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: bigint)
                 sort order: +
@@ -123,8 +273,8 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
-                value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int)
+                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
       Execution mode: vectorized
       Map Vectorization:
           enabled: true
@@ -141,12 +291,12 @@ STAGE PLANS:
           enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
       Reduce Operator Tree:
         Select Operator
-          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
+          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -167,7 +317,15 @@ PREHOOK: query: SELECT
   weekofyear(ctimestamp1),
   hour(ctimestamp1),
   minute(ctimestamp1),
-  second(ctimestamp1)
+  second(ctimestamp1),
+  cboolean1,
+  ctimestamp1,
+  ctimestamp2,
+  if (cboolean1, ctimestamp1, timestamp '1319-02-02 16:31:57.778'),
+  if (cboolean1, timestamp '2000-12-18 08:42:30.0005', ctimestamp1),
+  if (cboolean1, ctimestamp1, ctimestamp2),
+  if (cboolean1, ctimestamp1, null),
+  if (cboolean1, null, ctimestamp2)
 FROM alltypesorc_string
 ORDER BY c1
 PREHOOK: type: QUERY
@@ -182,52 +340,72 @@ POSTHOOK: query: SELECT
   weekofyear(ctimestamp1),
   hour(ctimestamp1),
   minute(ctimestamp1),
-  second(ctimestamp1)
+  second(ctimestamp1),
+  cboolean1,
+  ctimestamp1,
+  ctimestamp2,
+  if (cboolean1, ctimestamp1, timestamp '1319-02-02 16:31:57.778'),
+  if (cboolean1, timestamp '2000-12-18 08:42:30.0005', ctimestamp1),
+  if (cboolean1, ctimestamp1, ctimestamp2),
+  if (cboolean1, ctimestamp1, null),
+  if (cboolean1, null, ctimestamp2)
 FROM alltypesorc_string
 ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+-45479000681	528	10	27	27	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+1632478712	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
+490725011	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.028	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.028	NULL	1969-12-31 15:59:44.028
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.809	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.809	NULL	1969-12-31 15:59:44.809
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:45.949	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:45.949	NULL	1969-12-31 15:59:45.949
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:50.531	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:50.531	NULL	1969-12-31 15:59:50.531
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:51.009	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:51.009	NULL	1969-12-31 15:59:51.009
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:53.761	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:53.761	NULL	1969-12-31 15:59:53.761
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:00.905	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:00.905	NULL	1969-12-31 16:00:00.905
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:03.586	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:03.586	NULL	1969-12-31 16:00:03.586
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:05.227	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:05.227	NULL	1969-12-31 16:00:05.227
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:05.535	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:05.535	NULL	1969-12-31 16:00:05.535
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:07.02	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:07.02	NULL	1969-12-31 16:00:07.02
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:07.365	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:07.365	NULL	1969-12-31 16:00:07.365
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:07.517	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:07.517	NULL	1969-12-31 16:00:07.517
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:07.767	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:07.767	NULL	1969-12-31 16:00:07.767
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:08.602	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:08.602	NULL	1969-12-31 16:00:08.602
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:09.938	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:09.938	NULL	1969-12-31 16:00:09.938
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:14.214	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:14.214	NULL	1969-12-31 16:00:14.214
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 16:00:14.783	1319-02-02 16:31:57.778	NULL	1969-12-31 16:00:14.783	NULL	1969-12-31 16:00:14.783
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:43.773	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:44.262	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:44.568	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:45.697	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:47.351	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:47.446	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:48.023	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:48.629	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:49.177	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:49.208	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:50.789	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:51.245	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:52.372	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 15:59:55.249	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:00.661	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:00.784	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:09.313	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:09.538	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:09.986	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:11.031	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	1969-12-31 16:00:11.465	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	2024-11-11 16:42:41.101	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	NULL	2000-12-18 08:42:30.0005	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   to_unix_timestamp(stimestamp1) AS c1,
   year(stimestamp1),
@@ -268,7 +446,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -277,9 +455,9 @@ STAGE PLANS:
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [3, 4, 5, 6, 7, 8, 9, 10, 11]
-                  selectExpressions: VectorUDFUnixTimeStampString(col 1:string) -> 3:bigint, VectorUDFYearString(col 1:string, fieldStart 0, fieldLength 4) -> 4:int, VectorUDFMonthString(col 1:string, fieldStart 5, fieldLength 2) -> 5:int, VectorUDFDayOfMonthString(col 1:string, fieldStart 8, fieldLength 2) -> 6:int, VectorUDFDayOfMonthString(col 1:string, fieldStart 8, fieldLength 2) -> 7:int, VectorUDFWeekOfYearString(col 1:string) -> 8:int, VectorUDFHourString(col 1:string, fieldStart 11, fieldLength 2) -> 9:int, VectorUDFMinuteString(col 1:string, fieldStart 14, fieldLength 2) -> 10:int, VectorUDFSecondString(col 1:string, fieldStart 17, fieldLength 2) -> 11:int
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13]
+                  selectExpressions: VectorUDFUnixTimeStampString(col 2:string) -> 5:bigint, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 6:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 7:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 8:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 9:int, VectorUDFWeekOfYearString(col 2:string) -> 10:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 11:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 12:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 13:int
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: bigint)
                 sort order: +
@@ -288,7 +466,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int)
       Execution mode: vectorized
       Map Vectorization:
@@ -308,10 +486,10 @@ STAGE PLANS:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -353,6 +531,18 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
+-2736243926	1883	4	17	17	16	4	14	34
+-62018170411	4	9	22	22	39	18	26	29
+1365579826	2013	4	10	10	15	0	43	46
+206731024925	8521	1	16	16	3	20	42	5
+271201265	1978	8	5	5	31	14	41	5
+501208674	1985	11	18	18	47	16	37	54
+501208674	1985	11	18	18	47	16	37	54
+94573848655	4966	12	4	4	49	9	30	55
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
@@ -433,7 +623,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -442,9 +632,9 @@ STAGE PLANS:
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13]
-                  selectExpressions: LongColEqualLongColumn(col 3:bigint, col 4:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 0:timestamp) -> 3:bigint, VectorUDFUnixTimeStampString(col 1:string) -> 4:bigint) -> 5:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFYearTimestamp(col 0:timestamp, field YEAR) -> 3:int, VectorUDFYearString(col 1:string, fieldStart 0, fieldLength 4) -> 4:int) -> 6:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFMonthTimestamp(col 0:timestamp, field MONTH) -> 3:int, VectorUDFMonthString(col 1:string, fieldStart 5, fieldLength 2) -> 4:int) -> 7:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFDayOfMonthTimestamp(col 0:timestamp, field DAY_OF_MONTH) -> 3:int, VectorUDFDayOfMonthString(col 1:string, fieldStart 8, fieldLength 2) -> 4:int) -> 8:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFDayOfMonthTimestamp(col 0:timestamp, field DAY_OF_MONTH) -> 3:int, VectorUDFDayOfMonthString(col 1:string, fieldStart 8, fieldLength 2) -> 4:int) -> 9:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFWeekOfYearTimestamp(col 0:timestamp, field WEEK_OF_YEAR) -> 3:int, VectorUDFWeekOfYearString(col 1:string) -> 4:int) -> 10:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFHourTimestamp(col 0:timestamp, field HOUR_OF_DAY) -> 3:int, VectorUDFHourString(col 1:string, fieldStart 11, fieldLength 2) -> 4:int) -> 11:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFMinuteTimestamp(col 0:timestamp, field MINUTE) -> 3:int, VectorUDFMinuteString(col 1:string, fieldStart 14, fieldLength 2) -> 4:int) -> 12:boolean, LongColEqualLongColumn(col 3:int, col 4:int)(children: VectorUDFSecondTimestamp(col 0:timestamp, field SECOND) -> 3:int, VectorUDFSecondString(col 1:string, fieldStart 17, fieldLength 2) -> 4:int) -> 13:boolean
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15]
+                  selectExpressions: LongColEqualLongColumn(col 5:bigint, col 6:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFUnixTimeStampString(col 2:string) -> 6:bigint) -> 7:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 5:int, VectorUDFYearString(col 2:string, fieldStart 0, fieldLength 4) -> 6:int) -> 8:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 5:int, VectorUDFMonthString(col 2:string, fieldStart 5, fieldLength 2) -> 6:int) -> 9:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 10:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int) -> 11:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 5:int, VectorUDFWeekOfYearString(col 2:string) -> 6:int) -> 12:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 5:int, VectorUDFHourString(col 2:string, fieldStart 11, fieldLength 2) -> 6:int) -> 13:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 5:int, VectorUDFMinuteString(col 2:string, fieldStart 14, fieldLength 2) -> 6:int) -> 14:boolean, LongColEqualLongColumn(col 5:int, col 6:int)(children: VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 5:int, VectorUDFSecondString(col 2:string, fieldStart 17, fieldLength 2) -> 6:int) -> 15:boolean
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
@@ -453,7 +643,7 @@ STAGE PLANS:
                     native: false
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
-                Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean)
       Execution mode: vectorized
       Map Vectorization:
@@ -473,10 +663,10 @@ STAGE PLANS:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: boolean), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-          Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -558,6 +748,18 @@ NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+false	false	false	false	false	false	false	false	false
+false	false	false	false	false	false	false	false	false
+false	false	false	false	false	false	false	false	false
+false	false	false	false	false	false	false	false	false
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   to_unix_timestamp(stimestamp1) AS c1,
   year(stimestamp1),
@@ -714,7 +916,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -723,12 +925,12 @@ STAGE PLANS:
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [0]
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [1]
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: min(ctimestamp1), max(ctimestamp1), count(ctimestamp1), count()
                 Group By Vectorization:
-                    aggregators: VectorUDAFMinTimestamp(col 0:timestamp) -> timestamp, VectorUDAFMaxTimestamp(col 0:timestamp) -> timestamp, VectorUDAFCount(col 0:timestamp) -> bigint, VectorUDAFCountStar(*) -> bigint
+                    aggregators: VectorUDAFMinTimestamp(col 1:timestamp) -> timestamp, VectorUDAFMaxTimestamp(col 1:timestamp) -> timestamp, VectorUDAFCount(col 1:timestamp) -> bigint, VectorUDAFCountStar(*) -> bigint
                     className: VectorGroupByOperator
                     groupByMode: HASH
                     native: false
@@ -798,7 +1000,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL	NULL	0	40
+0528-10-27 08:15:18.941718273	7160-12-02 06:00:24.81200852	8	52
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(sum(ctimestamp1), 3)
 FROM alltypesorc_string
@@ -821,7 +1023,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -830,12 +1032,12 @@ STAGE PLANS:
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [0]
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [1]
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(ctimestamp1)
                 Group By Vectorization:
-                    aggregators: VectorUDAFSumTimestamp(col 0:timestamp) -> double
+                    aggregators: VectorUDAFSumTimestamp(col 1:timestamp) -> double
                     className: VectorGroupByOperator
                     groupByMode: HASH
                     native: false
@@ -903,7 +1105,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL
+2.89160863229166E11
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(avg(ctimestamp1), 0),
   variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19,
@@ -940,7 +1142,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: alltypesorc_string
-            Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
             Select Operator
@@ -949,13 +1151,13 @@ STAGE PLANS:
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [0, 3, 6]
-                  selectExpressions: CastTimestampToDouble(col 0:timestamp) -> 3:double, DoubleColMultiplyDoubleColumn(col 4:double, col 5:double)(children: CastTimestampToDouble(col 0:timestamp) -> 4:double, CastTimestampToDouble(col 0:timestamp) -> 5:double) -> 6:double
-              Statistics: Num rows: 40 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  projectedOutputColumnNums: [1, 5, 8]
+                  selectExpressions: CastTimestampToDouble(col 1:timestamp) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastTimestampToDouble(col 1:timestamp) -> 6:double, CastTimestampToDouble(col 1:timestamp) -> 7:double) -> 8:double
+              Statistics: Num rows: 52 Data size: 3515 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(_col0), count(_col0), sum(_col2), sum(_col1)
                 Group By Vectorization:
-                    aggregators: VectorUDAFSumTimestamp(col 0:timestamp) -> double, VectorUDAFCount(col 0:timestamp) -> bigint, VectorUDAFSumDouble(col 6:double) -> double, VectorUDAFSumDouble(col 3:double) -> double
+                    aggregators: VectorUDAFSumTimestamp(col 1:timestamp) -> double, VectorUDAFCount(col 1:timestamp) -> bigint, VectorUDAFSumDouble(col 8:double) -> double, VectorUDAFSumDouble(col 5:double) -> double
                     className: VectorGroupByOperator
                     groupByMode: HASH
                     native: false
@@ -1037,4 +1239,4 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
+3.6145107904E10	false	false	false	7.5245155692476E10	7.5245155692476E10	7.5245155692476E10	8.0440455033059E10

http://git-wip-us.apache.org/repos/asf/hive/blob/e2d4f347/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
index bebf769..f8ed7e2 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
@@ -180,6 +180,70 @@ public class VectorizedRowBatch implements Writable {
     return b.toString();
   }
 
+  private void appendVectorType(StringBuilder b, ColumnVector cv) {
+    String colVectorType = null;
+    if (cv instanceof LongColumnVector) {
+      colVectorType = "LONG";
+    } else if (cv instanceof DoubleColumnVector) {
+      colVectorType = "DOUBLE";
+    } else if (cv instanceof BytesColumnVector) {
+      colVectorType = "BYTES";
+    } else if (cv instanceof DecimalColumnVector) {
+      colVectorType = "DECIMAL";
+    } else if (cv instanceof TimestampColumnVector) {
+      colVectorType = "TIMESTAMP";
+    } else if (cv instanceof IntervalDayTimeColumnVector) {
+      colVectorType = "INTERVAL_DAY_TIME";
+    } else if (cv instanceof ListColumnVector) {
+      colVectorType = "LIST";
+    } else if (cv instanceof MapColumnVector) {
+      colVectorType = "MAP";
+    } else if (cv instanceof StructColumnVector) {
+      colVectorType = "STRUCT";
+    } else if (cv instanceof UnionColumnVector) {
+      colVectorType = "UNION";
+    } else {
+      colVectorType = "Unknown";
+    }
+    b.append(colVectorType);
+
+    if (cv instanceof ListColumnVector) {
+      ListColumnVector listColumnVector = (ListColumnVector) cv;
+      b.append("<");
+      appendVectorType(b, listColumnVector.child);
+      b.append(">");
+    } else if (cv instanceof MapColumnVector) {
+      MapColumnVector mapColumnVector = (MapColumnVector) cv;
+      b.append("<");
+      appendVectorType(b, mapColumnVector.keys);
+      b.append(", ");
+      appendVectorType(b, mapColumnVector.values);
+      b.append(">");
+    } else if (cv instanceof StructColumnVector) {
+      StructColumnVector structColumnVector = (StructColumnVector) cv;
+      b.append("<");
+      final int fieldCount = structColumnVector.fields.length;
+      for (int i = 0; i < fieldCount; i++) {
+        if (i > 0) {
+          b.append(", ");
+        }
+        appendVectorType(b, structColumnVector.fields[i]);
+      }
+      b.append(">");
+    } else if (cv instanceof UnionColumnVector) {
+      UnionColumnVector unionColumnVector = (UnionColumnVector) cv;
+      b.append("<");
+      final int fieldCount = unionColumnVector.fields.length;
+      for (int i = 0; i < fieldCount; i++) {
+        if (i > 0) {
+          b.append(", ");
+        }
+        appendVectorType(b, unionColumnVector.fields[i]);
+      }
+      b.append(">");
+    }
+  }
+
   public String stringify(String prefix) {
     if (size == 0) {
       return "";
@@ -195,33 +259,10 @@ public class VectorizedRowBatch implements Writable {
       }
       b.append(projIndex);
       b.append(":");
-      String colVectorType = null;
-      if (cv instanceof LongColumnVector) {
-        colVectorType = "LONG";
-      } else if (cv instanceof DoubleColumnVector) {
-        colVectorType = "DOUBLE";
-      } else if (cv instanceof BytesColumnVector) {
-        colVectorType = "BYTES";
-      } else if (cv instanceof DecimalColumnVector) {
-        colVectorType = "DECIMAL";
-      } else if (cv instanceof TimestampColumnVector) {
-        colVectorType = "TIMESTAMP";
-      } else if (cv instanceof IntervalDayTimeColumnVector) {
-        colVectorType = "INTERVAL_DAY_TIME";
-      } else if (cv instanceof ListColumnVector) {
-        colVectorType = "LIST";
-      } else if (cv instanceof MapColumnVector) {
-        colVectorType = "MAP";
-      } else if (cv instanceof StructColumnVector) {
-        colVectorType = "STRUCT";
-      } else if (cv instanceof UnionColumnVector) {
-        colVectorType = "UNION";
-      } else {
-        colVectorType = "Unknown";
-      }
-      b.append(colVectorType);
+      appendVectorType(b, cv);
     }
     b.append('\n');
+    b.append(prefix);
 
     if (this.selectedInUse) {
       for (int j = 0; j < size; j++) {

http://git-wip-us.apache.org/repos/asf/hive/blob/e2d4f347/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
----------------------------------------------------------------------
diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
index 7bc03ed..3d9f262 100644
--- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
+++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
@@ -119,7 +119,7 @@ public class TestStructColumnVector {
       byte[] buffer = ("value " + r).getBytes(StandardCharsets.UTF_8);
       y.setRef(r, buffer, 0, buffer.length);
     }
-    final String EXPECTED = ("Column vector types: 0:STRUCT, 1:BYTES\n" +
+    final String EXPECTED = ("Column vector types: 0:STRUCT<LONG, TIMESTAMP>, 1:BYTES\n" +
         "[[0, 2000-01-01 00:00:01.0], \"value 0\"]\n" +
         "[[3, 2000-01-01 00:00:02.0], \"value 1\"]\n" +
         "[[6, 2000-01-01 00:00:03.0], \"value 2\"]\n" +
@@ -153,7 +153,7 @@ public class TestStructColumnVector {
       byte[] buffer = ("value " + r).getBytes(StandardCharsets.UTF_8);
       y.setRef(r, buffer, 0, buffer.length);
     }
-    final String EXPECTED = ("Column vector types: 0:STRUCT, 1:BYTES\n" +
+    final String EXPECTED = ("Column vector types: 0:STRUCT<LONG, TIMESTAMP>, 1:BYTES\n" +
         "[[0, 2000-01-01 00:00:01], \"value 0\"]\n" +
         "[[3, 2000-01-01 00:00:02], \"value 1\"]\n" +
         "[[6, 2000-01-01 00:00:03], \"value 2\"]\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e2d4f347/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
----------------------------------------------------------------------
diff --git a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index fbb89a9..b5220a0 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -1063,6 +1063,15 @@ public class GenVectorCode extends Task {
       {"IfExprScalarScalar", "long", "double"},
       {"IfExprScalarScalar", "double", "double"},
 
+      {"IfExprObjectColumnColumn", "timestamp"},
+      {"IfExprObjectColumnColumn", "interval_day_time"},
+      {"IfExprObjectColumnScalar", "timestamp"},
+      {"IfExprObjectColumnScalar", "interval_day_time"},
+      {"IfExprObjectScalarColumn", "timestamp"},
+      {"IfExprObjectScalarColumn", "interval_day_time"},
+      {"IfExprObjectScalarScalar", "timestamp"},
+      {"IfExprObjectScalarScalar", "interval_day_time"},
+
       // template, <ClassName>, <ValueType>, <OperatorSymbol>, <DescriptionName>, <DescriptionValue>
       {"VectorUDAFMinMax", "VectorUDAFMinLong", "long", "<", "min",
           "_FUNC_(expr) - Returns the minimum value of expr (vectorized, type: long)"},
@@ -1385,6 +1394,12 @@ public class GenVectorCode extends Task {
         generateIfExprScalarColumn(tdesc);
       } else if (tdesc[0].equals("IfExprScalarScalar")) {
         generateIfExprScalarScalar(tdesc);
+      } else if (
+          tdesc[0].equals("IfExprObjectColumnColumn") ||
+          tdesc[0].equals("IfExprObjectColumnScalar") ||
+          tdesc[0].equals("IfExprObjectScalarColumn") ||
+          tdesc[0].equals("IfExprObjectScalarScalar")) {
+        generateIfExprObject(tdesc);
       } else if (tdesc[0].equals("FilterDecimalColumnCompareDecimalScalar")) {
         generateFilterDecimalColumnCompareDecimalScalar(tdesc);
       } else if (tdesc[0].equals("FilterDecimalScalarCompareDecimalColumn")) {
@@ -2259,6 +2274,46 @@ public class GenVectorCode extends Task {
         className, templateString);
   }
 
+  private void generateIfExprObject(String [] tdesc) throws Exception {
+    String typeName = tdesc[1];
+    String objectName;
+    String scalarType;
+    String scalarImport;
+    if (typeName.equals("timestamp")) {
+      objectName = "Timestamp";
+      scalarType = "Timestamp";
+      scalarImport = "java.sql.Timestamp";
+    } else if (typeName.equals("interval_day_time")) {
+      objectName = "IntervalDayTime";
+      scalarType = "HiveIntervalDayTime";
+      scalarImport = "org.apache.hadoop.hive.common.type.HiveIntervalDayTime";
+    } else {
+      objectName = "unknown";
+      scalarType = "unknown";
+      scalarImport = "unknown";
+    }
+    String classNameSuffix = tdesc[0].substring("IfExprObject".length());
+
+    String writableType = getOutputWritableType(typeName);
+    String columnVectorType = getColumnVectorType(typeName);
+
+    String className = "IfExpr" + objectName + classNameSuffix;
+
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<ScalarType>", scalarType);
+    templateString = templateString.replaceAll("<ScalarImport>", scalarImport);
+    templateString = templateString.replaceAll("<TypeName>", typeName);
+    templateString = templateString.replaceAll("<ObjectName>", objectName);
+    templateString = templateString.replaceAll("<WritableType>", writableType);
+    templateString = templateString.replaceAll("<ColumnVectorType>", columnVectorType);
+
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
   // template, <ClassNamePrefix>, <ReturnType>, <FuncName>
   private void generateDecimalColumnUnaryFunc(String [] tdesc) throws Exception {
     String classNamePrefix = tdesc[1];