Posted to commits@hive.apache.org by se...@apache.org on 2018/06/28 22:48:19 UTC

[23/52] [abbrv] hive git commit: HIVE-20007: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
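
Most of the diff below is golden-file churn from the new semantics: when a TIMESTAMP is converted to a numeric (seconds since epoch), its wall-clock value is now interpreted in UTC rather than in the executing JVM's time zone. A minimal java.time sketch of the difference, assuming the old golden files were produced under US/Pacific (which matches the 28800 s / 25200 s shifts visible in the hunks; the class name is just illustrative):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class UtcTimestampSketch {
        public static void main(String[] args) {
            // A wall-clock value that appears in alltypesorc: 1969-12-31 15:59:47
            LocalDateTime wallClock = LocalDateTime.of(1969, 12, 31, 15, 59, 47);

            // Old behaviour: epoch seconds computed via the session/JVM zone (US/Pacific here)
            long localEpoch = wallClock.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond(); // -13

            // New behaviour (HIVE-20007): epoch seconds computed via UTC
            long utcEpoch = wallClock.toEpochSecond(ZoneOffset.UTC); // -28813

            // The difference is the zone offset: 8 h (28800 s) in winter, 7 h (25200 s) under DST
            System.out.println(localEpoch - utcEpoch); // 28800
        }
    }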

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
index 81d080b..4d1fd3f 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -84,7 +84,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesorc
-                  filterExpr: (((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1)) or ((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639))) (type: boolean)
+                  filterExpr: (((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1)) or ((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639))) (type: boolean)
                   Statistics: Num rows: 12288 Data size: 2907994 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
@@ -93,8 +93,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -269,8 +269,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -303,8 +303,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -378,8 +378,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -411,8 +411,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -438,7 +438,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesorc
-                  filterExpr: (((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1)) or ((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639))) (type: boolean)
+                  filterExpr: (((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1)) or ((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639))) (type: boolean)
                   Statistics: Num rows: 12288 Data size: 2907994 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
@@ -446,8 +446,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -598,8 +598,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -632,8 +632,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
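
The rewritten filter literals in vectorization_13.q are the old constants shifted by that 8-hour standard-time offset, matching the new UTC-based value produced by CastTimestampToDouble:

    -28789     = 11     - 28800
    -28788     = 12     - 28800
    -28801.388 = -1.388 - 28800

(The plan output renders the double form of -28801.3359999999999999 as -28801.336.)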

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
index 981e49b..a478719 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
@@ -12,9 +12,9 @@ POSTHOOK: Lineage: date_decimal_test.cdate EXPRESSION [(alltypesorc)alltypesorc.
 POSTHOOK: Lineage: date_decimal_test.cdecimal EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
-PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -45,12 +45,12 @@ STAGE PLANS:
                     predicate: (cdouble is not null and cint is not null) (type: boolean)
                     Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: cdate (type: date), cdecimal (type: decimal(20,10))
-                      outputColumnNames: _col0, _col1
+                      expressions: cdate (type: date), cint (type: int), cdecimal (type: decimal(20,10))
+                      outputColumnNames: _col0, _col1, _col2
                       Select Vectorization:
                           className: VectorSelectOperator
                           native: true
-                          projectedOutputColumnNums: [2, 3]
+                          projectedOutputColumnNums: [2, 0, 3]
                       Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
                       Limit
                         Number of rows: 10
@@ -85,21 +85,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-1970-01-06	-7959.5837837838
-1970-01-06	-2516.4135135135
-1970-01-06	-9445.0621621622
-1970-01-06	-5713.7459459459
-1970-01-06	8963.6405405405
-1970-01-06	4193.6243243243
-1970-01-06	2964.3864864865
-1970-01-06	-4673.2540540541
-1970-01-06	-9216.8945945946
-1970-01-06	-9287.3756756757
+1970-01-07	528534767	-7959.5837837838
+1970-01-07	528534767	-2516.4135135135
+1970-01-07	528534767	-9445.0621621622
+1970-01-07	528534767	-5713.7459459459
+1970-01-07	528534767	8963.6405405405
+1970-01-07	528534767	4193.6243243243
+1970-01-07	528534767	2964.3864864865
+1970-01-07	528534767	-4673.2540540541
+1970-01-07	528534767	-9216.8945945946
+1970-01-07	528534767	-9287.3756756757
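
The 1970-01-06 -> 1970-01-07 change in the result rows is the same re-interpretation seen through a date cast: an instant that is still the evening of Jan 6 in US/Pacific is already Jan 7 in UTC, so the derived date moves forward a day. (The query was also extended to project cint, hence the extra middle column.) An illustrative sketch; the instant below is made up for the example, not the one actually encoded in the test data:

    import java.time.Instant;
    import java.time.LocalDate;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class DateRolloverSketch {
        public static void main(String[] args) {
            // 1970-01-07 04:00:00 UTC == 1970-01-06 20:00:00 US/Pacific
            Instant instant = Instant.ofEpochSecond(6 * 86400 + 4 * 3600);

            LocalDate pacificDate = instant.atZone(ZoneId.of("America/Los_Angeles")).toLocalDate(); // 1970-01-06
            LocalDate utcDate     = instant.atZone(ZoneOffset.UTC).toLocalDate();                   // 1970-01-07

            System.out.println(pacificDate + " -> " + utcDate);
        }
    }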

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index d4b986c..4d3c0b9 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -264,7 +264,7 @@ WHERE  ((762 = cbigint)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1.6000018929276082E8	1.5999646129276082E8	-1.5999646129276082E8	1.5999646129276082E8	2.5598867626205912E16	-8706342.964000002	-1.6000018929276082E8	5.481251832900263E8	4.095728233294762E24	8549.657499338193	-5.481251832900263E8	3.8812872199726546E8	2.12743126884874784E17	3.0054786945575117E17	-5.700752675298234	-3.0054786945575117E17	3.0054786945575117E17	973579.3664121248	5.482224634724039E8	-973579.3664121248	-18.377427808018613	-64	2044	-6.573680812059058E-5	18.377427808018613
+-1.2803533196894065E7	-1.2807261196894065E7	1.2807261196894065E7	-1.2807261196894065E7	1.6402593936546838E14	-275125.557	1.2803533196894065E7	6.102557176084042E8	-2.1007230485194618E21	9480.304481867239	-6.102557176084042E8	6.230629788052982E8	3.8022774524605715E17	3.7261870682317882E17	-11.503947368421052	-3.7261870682317882E17	3.7261870682317882E17	1083935.5552547143	6.104250214589658E8	-1083935.5552547143	46.53705506862114	-51	1029	-4.705076768887381E-5	-46.53705506862114
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT MAX(cint),
        (MAX(cint) / -3728),
@@ -983,7 +983,7 @@ WHERE  (((ctimestamp2 <= ctimestamp1)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--0.5934409161894847	6980.406559083811	6979.813118167622	2141851355	-11761.597368421053	-6980.406559083811	1.5852855222071928E8	-0.5934409161894847	2.5099887741860824E16	1.52140608502098611E18	-2141851355	-13.510823917813244	79.553	-3.998255191435152E19
+17.0	6998.0	7015.0	1942088700	412.6470588235294	-6998.0	1.7455632335840696E8	17.0	2.9018961928004512E16	1.0774839990192407E18	-1942088700	-11.125857045077739	17.0	-2.8316279494225646E19
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT cint,
        cdouble,
@@ -3709,7 +3709,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3717,7 +3717,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: []
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       Group By Vectorization:
@@ -3820,7 +3820,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3830,7 +3830,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0]
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(ctinyint)
                       Group By Vectorization:
@@ -3933,7 +3933,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3943,7 +3943,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [2]
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cint)
                       Group By Vectorization:
@@ -4046,7 +4046,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4056,7 +4056,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [4]
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cfloat)
                       Group By Vectorization:
@@ -4159,7 +4159,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4169,7 +4169,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [6]
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cstring1)
                       Group By Vectorization:
@@ -4272,7 +4272,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4282,7 +4282,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [10]
-                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cboolean1)
                       Group By Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 1827f67..1791c89 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -262,13 +262,13 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
-                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
+                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
-                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
+                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17]
+                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp
                     Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: bigint)
@@ -278,7 +278,7 @@ STAGE PLANS:
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
+                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
             Execution mode: vectorized
             Map Vectorization:
                 enabled: true
@@ -299,12 +299,12 @@ STAGE PLANS:
                 vectorized: true
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
+                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: boolean), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                 Select Vectorization:
                     className: VectorSelectOperator
                     native: true
-                    projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
+                    projectedOutputColumnNums: [0, 1, 2, 3, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
                 Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
@@ -369,14 +369,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--45479000681	528	10	27	27	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
-1632478712	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
-1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
-1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
-1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
-163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
-163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
-490725011	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
+-45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
+1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
+1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
+1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
+163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
+163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
+490699811	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:47.183	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:47.183	NULL	1969-12-31 15:59:47.183
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:52.843	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:52.843	NULL	1969-12-31 15:59:52.843
@@ -470,7 +470,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -561,14 +561,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--2736243926	1883	4	17	17	16	4	14	34
--62018170411	4	9	22	22	39	18	26	29
-1365579826	2013	4	10	10	15	0	43	46
-206731024925	8521	1	16	16	3	20	42	5
-271201265	1978	8	5	5	31	14	41	5
-501208674	1985	11	18	18	47	16	37	54
-501208674	1985	11	18	18	47	16	37	54
-94573848655	4966	12	4	4	49	9	30	55
+-2736272726	1883	4	17	17	16	4	14	34
+-62018199211	4	9	24	22	39	18	26	29
+1365554626	2013	4	10	10	15	0	43	46
+206730996125	8521	1	16	16	3	20	42	5
+271176065	1978	8	5	5	31	14	41	5
+501179874	1985	11	18	18	47	16	37	54
+501179874	1985	11	18	18	47	16	37	54
+94573819855	4966	12	4	4	49	9	30	55
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
@@ -662,7 +662,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (dayofmonth(ctimestamp1) = dayofmonth(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
+                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -854,7 +854,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -945,7 +945,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	11	30	NULL	48	NULL	NULL	NULL
+NULL	2	12	2	NULL	49	4	40	39
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
@@ -1206,7 +1206,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-2.89160863229166E11
+2.89160478029166E11
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(avg(ctimestamp1), 0),
   variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19,
@@ -1363,4 +1363,4 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-3.6145107904E10	false	false	false	7.5245155692476E10	7.5245155692476E10	7.5245155692476E10	8.0440455033059E10
+3.6145059754E10	false	false	false	7.5245178084814E10	7.5245178084814E10	7.5245178084814E10	8.0440478971476E10
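
The to_unix_timestamp shifts above are the same effect measured directly. The 1985-07-20 09:30:11 row is a convenient check, since July falls under daylight saving time (a 7-hour offset); a sketch, again assuming the old file was generated with a US/Pacific JVM:

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class UnixTimestampSketch {
        public static void main(String[] args) {
            LocalDateTime ts = LocalDateTime.of(1985, 7, 20, 9, 30, 11);

            long utc     = ts.toEpochSecond(ZoneOffset.UTC);                            // 490699811 (new golden value)
            long pacific = ts.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond(); // 490725011 (old golden value)

            System.out.println(pacific - utc); // 25200 = 7 hours (PDT)
        }
    }

(The 0528-10-27 row moves by an extra two days on top of the 8-hour offset, presumably because pre-Gregorian dates are now handled on a proleptic Gregorian calendar.)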

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
new file mode 100644
index 0000000..5fb5762
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
@@ -0,0 +1,740 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION runWorker AS 'org.apache.hadoop.hive.ql.udf.UDFRunWorker'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: runworker
+POSTHOOK: query: CREATE TEMPORARY FUNCTION runWorker AS 'org.apache.hadoop.hive.ql.udf.UDFRunWorker'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: runworker
+PREHOOK: query: create table mydual(a int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mydual
+POSTHOOK: query: create table mydual(a int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mydual
+PREHOOK: query: insert into mydual values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@mydual
+POSTHOOK: query: insert into mydual values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@mydual
+POSTHOOK: Lineage: mydual.a SCRIPT []
+PREHOOK: query: CREATE TABLE over10k_n2(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           `dec` decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over10k_n2
+POSTHOOK: query: CREATE TABLE over10k_n2(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           `dec` decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over10k_n2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over10k_n2
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@over10k_n2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over10k_n2
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@over10k_n2
+PREHOOK: query: CREATE TABLE over10k_orc_bucketed(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           `dec` decimal(4,2),
+           bin binary) CLUSTERED BY(si) INTO 4 BUCKETS STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: query: CREATE TABLE over10k_orc_bucketed(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           `dec` decimal(4,2),
+           bin binary) CLUSTERED BY(si) INTO 4 BUCKETS STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over10k_orc_bucketed
+PREHOOK: query: select distinct si, si%4 from over10k_n2 order by si
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_n2
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select distinct si, si%4 from over10k_n2 order by si
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_n2
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL	NULL
+256	0
+257	1
+258	2
+259	3
+260	0
+261	1
+262	2
+263	3
+264	0
+265	1
+266	2
+267	3
+268	0
+269	1
+270	2
+271	3
+272	0
+273	1
+274	2
+275	3
+276	0
+277	1
+278	2
+279	3
+280	0
+281	1
+282	2
+283	3
+284	0
+285	1
+286	2
+287	3
+288	0
+289	1
+290	2
+291	3
+292	0
+293	1
+294	2
+295	3
+296	0
+297	1
+298	2
+299	3
+300	0
+301	1
+302	2
+303	3
+304	0
+305	1
+306	2
+307	3
+308	0
+309	1
+310	2
+311	3
+312	0
+313	1
+314	2
+315	3
+316	0
+317	1
+318	2
+319	3
+320	0
+321	1
+322	2
+323	3
+324	0
+325	1
+326	2
+327	3
+328	0
+329	1
+330	2
+331	3
+332	0
+333	1
+334	2
+335	3
+336	0
+337	1
+338	2
+339	3
+340	0
+341	1
+342	2
+343	3
+344	0
+345	1
+346	2
+347	3
+348	0
+349	1
+350	2
+351	3
+352	0
+353	1
+354	2
+355	3
+356	0
+357	1
+358	2
+359	3
+360	0
+361	1
+362	2
+363	3
+364	0
+365	1
+366	2
+367	3
+368	0
+370	2
+371	3
+372	0
+373	1
+374	2
+375	3
+376	0
+377	1
+378	2
+379	3
+380	0
+381	1
+382	2
+383	3
+384	0
+385	1
+386	2
+387	3
+388	0
+389	1
+390	2
+391	3
+392	0
+393	1
+394	2
+395	3
+396	0
+397	1
+398	2
+399	3
+400	0
+401	1
+402	2
+403	3
+404	0
+405	1
+406	2
+407	3
+408	0
+409	1
+410	2
+411	3
+413	1
+414	2
+415	3
+417	1
+418	2
+419	3
+420	0
+421	1
+422	2
+423	3
+424	0
+425	1
+426	2
+427	3
+428	0
+429	1
+430	2
+431	3
+432	0
+433	1
+434	2
+435	3
+436	0
+437	1
+438	2
+439	3
+440	0
+441	1
+442	2
+443	3
+444	0
+445	1
+446	2
+447	3
+448	0
+449	1
+450	2
+451	3
+452	0
+453	1
+454	2
+455	3
+456	0
+457	1
+458	2
+459	3
+460	0
+461	1
+462	2
+463	3
+464	0
+465	1
+466	2
+467	3
+468	0
+469	1
+471	3
+472	0
+473	1
+474	2
+475	3
+476	0
+477	1
+478	2
+479	3
+480	0
+481	1
+482	2
+483	3
+484	0
+485	1
+486	2
+487	3
+488	0
+489	1
+490	2
+491	3
+492	0
+493	1
+494	2
+495	3
+496	0
+497	1
+498	2
+499	3
+500	0
+501	1
+502	2
+503	3
+504	0
+505	1
+506	2
+507	3
+508	0
+509	1
+510	2
+511	3
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_n2
+PREHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_n2
+POSTHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: Lineage: over10k_orc_bucketed.b SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:b, type:bigint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.bin SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bin, type:binary, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.bo SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.d SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.dec SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.f SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.i SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.s SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.si SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:t, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
+Found 4 items
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+PREHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_n2
+PREHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_n2
+POSTHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: Lineage: over10k_orc_bucketed.b SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:b, type:bigint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.bin SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bin, type:binary, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.bo SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.d SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.dec SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.f SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.i SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.s SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.si SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:t, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
+Found 8 items
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+7	hdfs://### HDFS PATH ###
+PREHOOK: query: alter table over10k_orc_bucketed set TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: query: alter table over10k_orc_bucketed set TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: default@over10k_orc_bucketed
+PREHOOK: query: explain select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by t, si, i
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by t, si, i
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over10k_orc_bucketed
+                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                  Statistics: Num rows: 2098 Data size: 41920 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                    Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: t (type: tinyint), si (type: smallint), i (type: int)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
+                        sort order: +++
+                        Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: vectorized
+        Reducer 2 
+            Execution mode: vectorized
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by  t, si, i
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by  t, si, i
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-3	344	65733
+-3	344	65733
+5	501	65585
+5	501	65585
+35	463	65646
+35	463	65646
+PREHOOK: query: explain select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over10k_orc_bucketed
+                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                  Statistics: Num rows: 2098 Data size: 41920 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                    Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), t (type: tinyint), si (type: smallint), i (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                        sort order: +
+                        Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: tinyint), _col2 (type: smallint), _col3 (type: int)
+            Execution mode: vectorized
+        Reducer 2 
+            Execution mode: vectorized
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+{"writeid":0,"bucketid":536870912,"rowid":104}	5	501	65585
+{"writeid":0,"bucketid":536870912,"rowid":420}	5	501	65585
+{"writeid":0,"bucketid":536936448,"rowid":37}	-3	344	65733
+{"writeid":0,"bucketid":536936448,"rowid":295}	-3	344	65733
+{"writeid":0,"bucketid":537067520,"rowid":173}	35	463	65646
+{"writeid":0,"bucketid":537067520,"rowid":406}	35	463	65646
+PREHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over10k_orc_bucketed
+                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                  Statistics: Num rows: 2098 Data size: 706986 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
+                    Statistics: Num rows: 2 Data size: 674 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), t (type: tinyint), si (type: smallint), f (type: float), d (type: double), bo (type: boolean), s (type: string), ts (type: timestamp), dec (type: decimal(4,2)), bin (type: binary)
+                      outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                      Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                        sort order: +
+                        Map-reduce partition columns: UDFToInteger(_col0) (type: int)
+                        Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: tinyint), _col2 (type: smallint), _col5 (type: float), _col6 (type: double), _col7 (type: boolean), _col8 (type: string), _col9 (type: timestamp), _col10 (type: decimal(4,2)), _col11 (type: binary)
+            Execution mode: vectorized
+        Reducer 2 
+            Execution mode: vectorized
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), 0 (type: int), 4294967363L (type: bigint), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: boolean), VALUE._col6 (type: string), VALUE._col7 (type: timestamp), VALUE._col8 (type: decimal(4,2)), VALUE._col9 (type: binary)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                      name: default.over10k_orc_bucketed
+                  Write Type: UPDATE
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: default.over10k_orc_bucketed
+          Write Type: UPDATE
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+
+PREHOOK: query: update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: default@over10k_orc_bucketed
+POSTHOOK: query: update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: default@over10k_orc_bucketed
+PREHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+{"writeid":### Masked writeid ###,"bucketid":536870912,"rowid":0}	5	501	0
+{"writeid":### Masked writeid ###,"bucketid":536870912,"rowid":1}	5	501	0
+{"writeid":### Masked writeid ###,"bucketid":536936448,"rowid":0}	-3	344	0
+{"writeid":### Masked writeid ###,"bucketid":536936448,"rowid":1}	-3	344	0
+{"writeid":### Masked writeid ###,"bucketid":537067520,"rowid":0}	35	463	0
+{"writeid":### Masked writeid ###,"bucketid":537067520,"rowid":1}	35	463	0
+PREHOOK: query: explain select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over10k_orc_bucketed
+                  Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                    outputColumnNames: ROW__ID
+                    Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: count()
+                      keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                        Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: bigint)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                Filter Operator
+                  predicate: (_col1 > 1L) (type: boolean)
+                  Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: select ROW__ID, * from over10k_orc_bucketed where ROW__ID is null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k_orc_bucketed
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select ROW__ID, * from over10k_orc_bucketed where ROW__ID is null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k_orc_bucketed
+POSTHOOK: Output: hdfs://### HDFS PATH ###

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index 88499fd..2531f4a 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 4 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_n9
@@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 8 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_orc_bucketed_n0
@@ -680,22 +680,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: over10k_orc_bucketed_n0
-                  Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                     outputColumnNames: ROW__ID
-                    Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
         Reducer 2 
             Reduce Operator Tree:
@@ -704,13 +704,13 @@ STAGE PLANS:
                 keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (_col1 > 1L) (type: boolean)
-                  Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat