Posted to commits@hive.apache.org by jc...@apache.org on 2018/06/22 17:46:31 UTC

[15/35] hive git commit: HIVE-12192: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
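
Most of the golden-file churn below traces back to one behavioral change: before this patch, casting a TIMESTAMP to DOUBLE produced epoch seconds computed against the q-test's local time zone (US/Pacific in these runs, which is an assumption here), while afterwards the same conversion is carried out in UTC. A minimal sketch of that difference, using a hand-picked wall-clock value rather than anything taken from the q-files:

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimestampUtcSketch {
        public static void main(String[] args) {
            // A wall-clock value as it would appear in a TIMESTAMP column.
            LocalDateTime ts = LocalDateTime.of(1969, 12, 31, 16, 0, 11);

            // Zone-dependent reading: seconds since epoch, interpreting the wall clock in US/Pacific.
            long localSeconds = ts.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond();

            // UTC reading: the same wall-clock value interpreted in UTC.
            long utcSeconds = ts.toEpochSecond(ZoneOffset.UTC);

            System.out.println(localSeconds); // 11
            System.out.println(utcSeconds);   // -28789  (11 - 8 * 3600)
        }
    }

This is why filter constants such as "ctimestamp1 > 11" in the vectorization q-files are rewritten as "ctimestamp1 > -28789" further down: each literal is shifted by the eight-hour Pacific offset (28800 seconds; likewise -1.388 becomes -28801.388) so the queries keep selecting the same rows.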

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/orc_merge6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge6.q.out b/ql/src/test/results/clientpositive/orc_merge6.q.out
index 7c429d6..7f3d3d3 100644
--- a/ql/src/test/results/clientpositive/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge6.q.out
@@ -38,17 +38,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n4
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -57,18 +57,18 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
                   outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     keys: year (type: string), hour (type: int)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string), _col1 (type: int)
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -76,14 +76,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: int)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -156,9 +156,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 3 items
+Found 1 items
 #### A masked pattern was here ####
-Found 3 items
+Found 1 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
@@ -207,17 +207,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n4
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -226,18 +226,18 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
                   outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     keys: year (type: string), hour (type: int)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string), _col1 (type: int)
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -245,14 +245,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: int)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -430,9 +430,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 3 items
+Found 1 items
 #### A masked pattern was here ####
-Found 3 items
+Found 1 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
index 6295714..c83c416 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
@@ -37,17 +37,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_merge5_n3
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -56,7 +56,7 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp)
                   outputColumnNames: userid, string1, subtype, decimal1, ts
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     mode: hash

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
index 95fa5ca..ef75520 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
@@ -37,14 +37,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_merge5
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                     output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -53,18 +53,18 @@ STAGE PLANS:
               Select Operator
                 expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double)
                 outputColumnNames: userid, string1, subtype, decimal1, ts, st
-                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                   keys: st (type: double)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: double)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: double)
-                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -72,14 +72,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: double)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/orc_ppd_char.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_ppd_char.q.out b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
index 846de53..95766b0 100644
--- a/ql/src/test/results/clientpositive/orc_ppd_char.q.out
+++ b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-334427804500
+336445133500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-334427804500
+336445133500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -98,7 +98,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -107,7 +107,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -134,7 +134,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -143,7 +143,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "carrot"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--252951929000
+-250934600000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-81475875500
+85510533500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "carrot" and "carrot1"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
index 6dd6e3f..f1a137c 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -87,8 +87,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -246,8 +246,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -280,8 +280,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > 11)
-              AND ((ctimestamp2 != 12)
+          OR ((ctimestamp1 > -28789)
+              AND ((ctimestamp2 != -28788)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -355,8 +355,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -388,8 +388,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -417,8 +417,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -563,8 +563,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -597,8 +597,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -1.388)
-              AND ((ctimestamp2 != -1.3359999999999999)
+          OR ((ctimestamp1 > -28801.388)
+              AND ((ctimestamp2 != -28801.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
index e33e701..357d838 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0)
+        AND (((ctimestamp1 <= -28800)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -15)
+              OR ((ctimestamp2 > -28815)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0)
+        AND (((ctimestamp1 <= -28800)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -15)
+              OR ((ctimestamp2 > -28815)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -74,8 +74,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -155,11 +155,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0)
+        AND (((ctimestamp1 <= -28800)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -15)
+              OR ((ctimestamp2 > -28815)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -183,11 +183,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0)
+        AND (((ctimestamp1 <= -28800)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -15)
+              OR ((ctimestamp2 > -28815)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -237,11 +237,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0.0)
+        AND (((ctimestamp1 <= -28800.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > 7.6850000000000005)
+              OR ((ctimestamp2 > -28792.3149999999999995)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0.0)
+        AND (((ctimestamp1 <= -28800.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > 7.6850000000000005)
+              OR ((ctimestamp2 > -28792.3149999999999995)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -294,8 +294,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -369,11 +369,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0.0)
+        AND (((ctimestamp1 <= -28800.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > 7.6850000000000005)
+              OR ((ctimestamp2 > -28792.3149999999999995)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -397,11 +397,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= 0.0)
+        AND (((ctimestamp1 <= -28800.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > 7.6850000000000005)
+              OR ((ctimestamp2 > -28792.3149999999999995)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
index 485bfe7..5a8c069 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
@@ -89,13 +89,13 @@ POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test_parquet where cin
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test_parquet
 #### A masked pattern was here ####
-1970-01-06	-7959.5837837838
-1970-01-06	-2516.4135135135
-1970-01-06	-9445.0621621622
-1970-01-06	-5713.7459459459
-1970-01-06	8963.6405405405
-1970-01-06	4193.6243243243
-1970-01-06	2964.3864864865
-1970-01-06	-4673.2540540541
-1970-01-06	-9216.8945945946
-1970-01-06	-9287.3756756757
+1970-01-07	-7959.5837837838
+1970-01-07	-2516.4135135135
+1970-01-07	-9445.0621621622
+1970-01-07	-5713.7459459459
+1970-01-07	8963.6405405405
+1970-01-07	4193.6243243243
+1970-01-07	2964.3864864865
+1970-01-07	-4673.2540540541
+1970-01-07	-9216.8945945946
+1970-01-07	-9287.3756756757
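
The one-day move in parquet_vectorization_decimal_date.q.out (1970-01-06 becoming 1970-01-07) is the same offset surfacing through a cast to DATE: an epoch instant that still falls late on Jan 6 on a Pacific wall clock is already Jan 7 in UTC. A hedged illustration of that boundary effect, with a made-up epoch-second value and the Pacific test zone assumed, not taken from the test data:

    import java.time.Instant;
    import java.time.LocalDate;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimestampToDateSketch {
        public static void main(String[] args) {
            // Hypothetical epoch-second value; any instant in Jan 7, 00:00-07:59 UTC behaves the same.
            Instant instant = Instant.ofEpochSecond(528_000L);

            // Date part of the Pacific wall clock (the old, zone-dependent reading).
            LocalDate pacificDate = instant.atZone(ZoneId.of("America/Los_Angeles")).toLocalDate();

            // Date part of the UTC wall clock (the new reading).
            LocalDate utcDate = instant.atZone(ZoneOffset.UTC).toLocalDate();

            System.out.println(pacificDate); // 1970-01-06
            System.out.println(utcDate);     // 1970-01-07
        }
    }

The decimal column in those rows is unchanged, consistent with only the timestamp-derived date being sensitive to the zone change.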

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/results/clientpositive/partition_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_timestamp.q.out b/ql/src/test/results/clientpositive/partition_timestamp.q.out
index 34f70a5..a80ed2b 100644
--- a/ql/src/test/results/clientpositive/partition_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/partition_timestamp.q.out
@@ -14,79 +14,79 @@ PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-
   select * from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
+PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 01:00:00', region= '1')
   select * from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2')
   select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
+PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2')
   select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20')
   select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20')
   select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1')
   select * from src tablesample (20 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1')
   select * from src tablesample (20 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10')
   select * from src tablesample (11 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10')
   select * from src tablesample (11 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select distinct dt from partition_timestamp_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp_1
-PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
-PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
+PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp_1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
+POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
 #### A masked pattern was here ####
 2000-01-01 01:00:00
 2000-01-01 02:00:00