Posted to commits@hive.apache.org by kg...@apache.org on 2018/08/02 10:55:35 UTC

[2/6] hive git commit: HIVE-20260: NDV of a column shouldn't be scaled when row count is changed by filter on another column (Zoltan Haindrich reviewed by Ashutosh Chauhan)
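
Background for the golden-file changes below: Hive's optimizer estimates the selectivity of an equality predicate as roughly 1/NDV of the compared column. Before this fix, once a filter on one column reduced the estimated row count, the NDVs of the other columns were scaled down along with it, so later predicates, group-bys and joins on those columns looked far less selective than they really are. The sketch below only illustrates that arithmetic with made-up numbers; it is not Hive's actual implementation, and the exact scaling and capping rules there differ.

# Illustrative sketch with hypothetical numbers and simplified formulas,
# not the real Hive estimator.
def filtered_rows_old(rows, ndv_a, ndv_b):
    # predicate: a = <const> AND b = <const>
    rows_after_a = rows / ndv_a                           # selectivity of "a = const" ~ 1/NDV(a)
    scaled_ndv_b = max(1.0, ndv_b * rows_after_a / rows)  # old: NDV(b) shrunk with the row count
    return rows_after_a / scaled_ndv_b

def filtered_rows_new(rows, ndv_a, ndv_b):
    rows_after_a = rows / ndv_a
    kept_ndv_b = min(ndv_b, rows_after_a)                 # new: NDV(b) kept, only capped at the row count
    return rows_after_a / kept_ndv_b

print(filtered_rows_old(12288, 100, 50))   # ~122.9 rows: "b = const" looks like a no-op
print(filtered_rows_new(12288, 100, 50))   # ~2.5 rows: "b = const" keeps its 1/50 selectivity

The estimates in the updated q.out files move in that direction, for example 6 -> 3 and 27 -> 2 rows for the compound filters in vectorization_0.q.out further down.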

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out b/ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out
index 55c581f..e44639d 100644
--- a/ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out
@@ -108,11 +108,11 @@ STAGE PLANS:
                   1 UDFToInteger(_col0) (type: int)
                   2 (UDFToInteger(_col0) + 0) (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col1 (type: smallint), _col0 (type: tinyint), _col2 (type: int)
                   sort order: +++
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -120,10 +120,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey1 (type: tinyint), KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -302,11 +302,11 @@ STAGE PLANS:
                   1 UDFToInteger(_col0) (type: int)
                   2 (UDFToInteger(_col0) + 0) (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col1 (type: smallint), _col0 (type: tinyint), _col2 (type: int)
                   sort order: +++
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -314,10 +314,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey1 (type: tinyint), KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -496,11 +496,11 @@ STAGE PLANS:
                   1 UDFToInteger(_col0) (type: int)
                   2 (UDFToInteger(_col0) + 0) (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col1 (type: smallint), _col0 (type: tinyint), _col2 (type: int)
                   sort order: +++
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -508,10 +508,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey1 (type: tinyint), KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: string), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1715 Data size: 293140 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 791 Data size: 6700 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
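
The hunks above show the downstream effect on join estimates: the three-way join's output drops from 1715 to 791 estimated rows now that the join-key NDVs are no longer shrunk by filters on unrelated columns. A textbook-style sketch of why a smaller key NDV inflates a join estimate (hypothetical row counts and NDVs, not the figures Hive computed for this plan):

# Rough System R-style equi-join cardinality; Hive's estimator differs in detail.
def join_rows(rows_left, rows_right, ndv_left_key, ndv_right_key):
    return rows_left * rows_right / max(ndv_left_key, ndv_right_key)

print(join_rows(1000, 2000, 40, 50))   # 40000.0 with the keys' original NDVs
print(join_rows(1000, 2000, 8, 10))    # 200000.0 after the NDVs were scaled down 5x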

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
index 47a0be9..f006e37 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
@@ -3804,13 +3804,13 @@ STAGE PLANS:
                         keys: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                         mode: hash
                         outputColumnNames: _col0, _col1, _col2, _col3
-                        Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
                           key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                           null sort order: aaaa
                           sort order: ++++
                           Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                          Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                           tag: -1
                           auto parallelism: true
             Execution mode: vectorized
@@ -4037,14 +4037,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
                   directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 1
                   Static Partition Specification: ds=2/
-                  Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                   Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
@@ -4073,7 +4073,7 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), '2' (type: string)
                   outputColumnNames: k1, k2, k3, k4, ds
-                  Statistics: Num rows: 332 Data size: 132350 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 332 Data size: 126832 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: compute_stats(k1, 'hll'), compute_stats(k2, 'hll'), compute_stats(k3, 'hll'), compute_stats(k4, 'hll')
                     keys: ds (type: string)
@@ -4141,22 +4141,22 @@ STAGE PLANS:
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col4, _col5
                 Position of Big Table: 0
-                Statistics: Num rows: 166 Data size: 45034 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 166 Data size: 39516 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 166 Data size: 45034 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 166 Data size: 39516 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     keys: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                       null sort order: aaaa
                       sort order: ++++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                      Statistics: Num rows: 332 Data size: 104130 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 332 Data size: 98612 Basic stats: COMPLETE Column stats: COMPLETE
                       tag: -1
                       auto parallelism: true
         Union 2 

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
index 998d497..36be963 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
@@ -1782,10 +1782,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 65 Data size: 35360 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 83 Data size: 45152 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 65 Data size: 35360 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 83 Data size: 45152 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out
index f8220e1..daaf17f 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out
@@ -767,7 +767,7 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: bigint)
                 mode: final
                 outputColumnNames: _col0, _col1, _col3
-                Statistics: Num rows: 12 Data size: 2232 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 9 Data size: 1674 Basic stats: COMPLETE Column stats: COMPLETE
                 pruneGroupingSetId: true
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
@@ -776,13 +776,13 @@ STAGE PLANS:
                       className: VectorSelectOperator
                       native: true
                       projectedOutputColumnNums: [0, 1, 2]
-                  Statistics: Num rows: 12 Data size: 2136 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 9 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
                     compressed: false
                     File Sink Vectorization:
                         className: VectorFileSinkOperator
                         native: false
-                    Statistics: Num rows: 12 Data size: 2136 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 9 Data size: 1602 Basic stats: COMPLETE Column stats: COMPLETE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
index c31b759..94733cf 100644
--- a/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out
@@ -151,7 +151,7 @@ STAGE PLANS:
                         keys: _col0 (type: int)
                         mode: hash
                         outputColumnNames: _col0
-                        Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
@@ -160,7 +160,7 @@ STAGE PLANS:
                               className: VectorReduceSinkLongOperator
                               native: true
                               nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
             Map Vectorization:
@@ -206,7 +206,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col3
                   input vertices:
                     1 Map 3
-                  Statistics: Num rows: 7 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 14 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
                   Map Join Operator
                     condition map:
                          Left Semi Join 0 to 1
@@ -404,7 +404,7 @@ STAGE PLANS:
                         native: true
                         predicateExpression: FilterExprAndExpr(children: FilterStringGroupColEqualStringScalar(col 14:string, val AIR), FilterLongColEqualLongScalar(col 3:int, val 1), SelectColumnIsNotNull(col 0:int))
                     predicate: ((l_linenumber = 1) and (l_shipmode = 'AIR') and l_orderkey is not null) (type: boolean)
-                    Statistics: Num rows: 14 Data size: 1344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: l_orderkey (type: int), 1 (type: int)
                       outputColumnNames: _col0, _col1
@@ -413,7 +413,7 @@ STAGE PLANS:
                           native: true
                           projectedOutputColumnNums: [0, 17]
                           selectExpressions: ConstantVectorExpression(val 1) -> 17:int
-                      Statistics: Num rows: 14 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
                         Group By Vectorization:
                             className: VectorGroupByOperator
@@ -425,7 +425,7 @@ STAGE PLANS:
                         keys: _col0 (type: int), _col1 (type: int)
                         mode: hash
                         outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
                           key expressions: _col0 (type: int), _col1 (type: int)
                           sort order: ++
@@ -434,7 +434,7 @@ STAGE PLANS:
                               className: VectorReduceSinkMultiKeyOperator
                               native: true
                               nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
             Map Vectorization:
@@ -480,7 +480,7 @@ STAGE PLANS:
                   outputColumnNames: _col0, _col1, _col3, _col4
                   input vertices:
                     1 Map 3
-                  Statistics: Num rows: 7 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 14 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
                   Map Join Operator
                     condition map:
                          Left Semi Join 0 to 1
@@ -494,7 +494,7 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col3
                     input vertices:
                       1 Map 4
-                    Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col0 (type: int), _col3 (type: int)
                       outputColumnNames: _col0, _col1
@@ -502,13 +502,13 @@ STAGE PLANS:
                           className: VectorSelectOperator
                           native: true
                           projectedOutputColumnNums: [0, 2]
-                      Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
                         File Sink Vectorization:
                             className: VectorFileSinkOperator
                             native: false
-                        Statistics: Num rows: 7 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
index 5dffd15..d9b62a9 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out
@@ -145,7 +145,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col1 (type: string)
                 outputColumnNames: _col1, _col2, _col3
-                Statistics: Num rows: 36 Data size: 284 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 27 Data size: 212 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: sum(_col3), sum(_col1)
                   keys: _col2 (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
index 75c393e9..55908e5 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out
@@ -985,7 +985,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col1 (type: string)
                 outputColumnNames: _col1, _col2, _col3
-                Statistics: Num rows: 36 Data size: 284 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 27 Data size: 212 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: sum(_col3), sum(_col1)
                   keys: _col2 (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
index 7abdc3c..fde799e 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_0.q.out
@@ -30631,17 +30631,17 @@ STAGE PLANS:
                   Filter Operator
                     isSamplingPred: false
                     predicate: (((cint = 45) and (cfloat = 3.02)) or ((cint = 47) and (cfloat = 2.09)) or ((cint = 49) and (cfloat = 3.5))) (type: boolean)
-                    Statistics: Num rows: 6 Data size: 1630 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 3 Data size: 930 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                      Statistics: Num rows: 6 Data size: 1630 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 3 Data size: 930 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
                         GlobalTableId: 0
 #### A masked pattern was here ####
                         NumFilesPerFileSink: 1
-                        Statistics: Num rows: 6 Data size: 1630 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 3 Data size: 930 Basic stats: COMPLETE Column stats: COMPLETE
 #### A masked pattern was here ####
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -30870,17 +30870,17 @@ STAGE PLANS:
                   Filter Operator
                     isSamplingPred: false
                     predicate: (((cint = 45) or (cfloat = 3.02)) and ((cint = 47) or (cfloat = 2.09)) and ((cint = 49) or (cfloat = 3.5))) (type: boolean)
-                    Statistics: Num rows: 27 Data size: 6990 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 2 Data size: 620 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                      Statistics: Num rows: 27 Data size: 6990 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 2 Data size: 620 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
                         GlobalTableId: 0
 #### A masked pattern was here ####
                         NumFilesPerFileSink: 1
-                        Statistics: Num rows: 27 Data size: 6990 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 2 Data size: 620 Basic stats: COMPLETE Column stats: COMPLETE
 #### A masked pattern was here ####
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out b/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
index f7c00f8..3d5bea1 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_limit.q.out
@@ -909,7 +909,7 @@ STAGE PLANS:
                       keys: cdouble (type: double)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 4127 Data size: 57672 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
@@ -920,7 +920,7 @@ STAGE PLANS:
                             native: true
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                             valueColumnNums: [1]
-                        Statistics: Num rows: 4127 Data size: 57672 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -968,7 +968,7 @@ STAGE PLANS:
                 keys: KEY._col0 (type: double)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 4127 Data size: 57672 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col1 (type: bigint), _col0 (type: double)
                   sort order: ++
@@ -978,7 +978,7 @@ STAGE PLANS:
                       native: true
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       valueColumnNums: []
-                  Statistics: Num rows: 4127 Data size: 57672 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
                   TopN Hash Memory Usage: 0.3
         Reducer 3 
             Execution mode: vectorized, llap
@@ -1003,7 +1003,7 @@ STAGE PLANS:
                     className: VectorSelectOperator
                     native: true
                     projectedOutputColumnNums: [1, 0]
-                Statistics: Num rows: 4127 Data size: 57672 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
                 Limit
                   Number of rows: 20
                   Limit Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index 3830f41..f5bf886 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -620,7 +620,7 @@ STAGE PLANS:
                         native: true
                         predicateExpression: FilterExprOrExpr(children: FilterTimestampColEqualTimestampColumn(col 8:timestamp, col 9:timestamp), FilterDoubleColEqualDoubleScalar(col 4:float, val 762.0), FilterStringGroupColEqualStringScalar(col 6:string, val ss), FilterExprAndExpr(children: FilterLongColLessEqualLongColumn(col 1:bigint, col 3:bigint)(children: col 1:smallint), FilterLongColEqualLongScalar(col 11:boolean, val 1)), FilterExprAndExpr(children: SelectColumnIsNotNull(col 10:boolean), SelectColumnIsNotNull(col 9:timestamp), FilterStringGroupColGreaterStringScalar(col 7:string, val a)))
                     predicate: (((UDFToLong(csmallint) <= cbigint) and (cboolean2 = 1)) or (cboolean1 is not null and ctimestamp2 is not null and (cstring2 > 'a')) or (cfloat = 762) or (cstring1 = 'ss') or (ctimestamp1 = ctimestamp2)) (type: boolean)
-                    Statistics: Num rows: 12288 Data size: 3093170 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 11346 Data size: 2856120 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: cbigint (type: bigint), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cdouble (type: double), UDFToDouble(cbigint) (type: double), (UDFToDouble(cbigint) * UDFToDouble(cbigint)) (type: double), UDFToDouble(csmallint) (type: double), (UDFToDouble(csmallint) * UDFToDouble(csmallint)) (type: double), (cdouble * cdouble) (type: double)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
@@ -629,7 +629,7 @@ STAGE PLANS:
                           native: true
                           projectedOutputColumnNums: [3, 0, 1, 2, 5, 13, 16, 14, 18, 15]
                           selectExpressions: CastLongToDouble(col 3:bigint) -> 13:double, DoubleColMultiplyDoubleColumn(col 14:double, col 15:double)(children: CastLongToDouble(col 3:bigint) -> 14:double, CastLongToDouble(col 3:bigint) -> 15:double) -> 16:double, CastLongToDouble(col 1:smallint) -> 14:double, DoubleColMultiplyDoubleColumn(col 15:double, col 17:double)(children: CastLongToDouble(col 1:smallint) -> 15:double, CastLongToDouble(col 1:smallint) -> 17:double) -> 18:double, DoubleColMultiplyDoubleColumn(col 5:double, col 5:double) -> 15:double
-                      Statistics: Num rows: 12288 Data size: 3093170 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 11346 Data size: 2856120 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
                         aggregations: sum(_col6), sum(_col5), count(_col0), count(), max(_col1), sum(_col8), sum(_col7), count(_col2), max(_col3), sum(_col9), sum(_col4), count(_col4), count(_col1), sum(_col1)
                         Group By Vectorization:
@@ -2216,7 +2216,7 @@ STAGE PLANS:
                         keys: _col0 (type: smallint)
                         mode: hash
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-                        Statistics: Num rows: 1156 Data size: 77440 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1251 Data size: 83804 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
                           key expressions: _col0 (type: smallint)
                           sort order: +
@@ -2225,7 +2225,7 @@ STAGE PLANS:
                               className: VectorReduceSinkLongOperator
                               native: true
                               nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          Statistics: Num rows: 1156 Data size: 77440 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 1251 Data size: 83804 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: bigint)
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2260,7 +2260,7 @@ STAGE PLANS:
                 keys: KEY._col0 (type: smallint)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-                Statistics: Num rows: 1156 Data size: 77440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 1251 Data size: 83804 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
                   expressions: _col0 (type: smallint), (UDFToInteger(_col0) % -75) (type: int), power(((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END), 0.5) (type: double), (-1.389 / CAST( _col0 AS decimal(5,0))) (type: decimal(10,9)), _col4 (type: bigint), (UDFToDouble((UDFToInteger(_col0) % -75)) / UDFToDouble(_col4)) (type: double), (- (UDFToInteger(_col0) % -75)) (type: int), ((_col5 - ((_col6 * _col6) / _col7)) / _col7) (type: double), (- (- (UDFToInteger(_col0) % -75))) (type: int), _col8 (type: bigint), (_col8 - -89010L) (type: bigint)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
@@ -2269,7 +2269,7 @@ STAGE PLANS:
                       native: true
                       projectedOutputColumnNums: [0, 9, 10, 16, 4, 18, 19, 17, 14, 8, 20]
                       selectExpressions: LongColModuloLongScalar(col 0:int, val -75)(children: col 0:smallint) -> 9:int, FuncPowerDoubleToDouble(col 11:double)(children: DoubleColDivideLongColumn(col 10:double, col 14:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 11:double)(children: DoubleColDivideLongColumn(col 10:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 10:double) -> 11:double) -> 10:double, IfExprNullCondExpr(col 12:boolean, null, col 13:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 12:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 13:bigint) -> 14:bigint) -> 11:double) -> 10:double, DecimalScalarDivideDecimalColumn(val -1.389, col 15:decimal(5,0))(children: CastLongToDecimal(col 0:smallint) -> 15:decimal(5,0)) -> 16:decimal(10,9), DoubleColDivideDoubleColumn(col 11:double, col 17:double)(children: CastLongToDouble(col 14:int)(children: LongColModuloLongScalar(col 0:int, val -7
 5)(children: col 0:smallint) -> 14:int) -> 11:double, CastLongToDouble(col 4:bigint) -> 17:double) -> 18:double, LongColUnaryMinus(col 14:int)(children: LongColModuloLongScalar(col 0:int, val -75)(children: col 0:smallint) -> 14:int) -> 19:int, DoubleColDivideLongColumn(col 11:double, col 7:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 17:double)(children: DoubleColDivideLongColumn(col 11:double, col 7:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) -> 11:double) -> 17:double) -> 11:double) -> 17:double, LongColUnaryMinus(col 20:int)(children: LongColUnaryMinus(col 14:int)(children: LongColModuloLongScalar(col 0:int, val -75)(children: col 0:smallint) -> 14:int) -> 20:int) -> 14:int, LongColSubtractLongScalar(col 8:bigint, val -89010) -> 20:bigint
-                  Statistics: Num rows: 1156 Data size: 202288 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1251 Data size: 218912 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: double), _col3 (type: decimal(10,9)), _col4 (type: bigint), _col5 (type: double), _col6 (type: int), _col7 (type: double), _col8 (type: int), _col9 (type: bigint), _col10 (type: bigint)
                     sort order: +++++++++++
@@ -2277,7 +2277,7 @@ STAGE PLANS:
                         className: VectorReduceSinkObjectHashOperator
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                    Statistics: Num rows: 1156 Data size: 202288 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1251 Data size: 218912 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.1
         Reducer 3 
             Execution mode: vectorized, llap
@@ -2295,7 +2295,7 @@ STAGE PLANS:
                     className: VectorSelectOperator
                     native: true
                     projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
-                Statistics: Num rows: 1156 Data size: 202288 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 1251 Data size: 218912 Basic stats: COMPLETE Column stats: COMPLETE
                 Limit
                   Number of rows: 20
                   Limit Vectorization:
@@ -2494,7 +2494,7 @@ STAGE PLANS:
                         keys: _col0 (type: double)
                         mode: hash
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                        Statistics: Num rows: 1127 Data size: 51824 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1251 Data size: 57520 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
                           key expressions: _col0 (type: double)
                           sort order: +
@@ -2503,7 +2503,7 @@ STAGE PLANS:
                               className: VectorReduceSinkMultiKeyOperator
                               native: true
                               nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          Statistics: Num rows: 1127 Data size: 51824 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 1251 Data size: 57520 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: double)
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2538,7 +2538,7 @@ STAGE PLANS:
                 keys: KEY._col0 (type: double)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 1127 Data size: 51824 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 1251 Data size: 57520 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
                   expressions: _col0 (type: double), ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END) (type: double), (2563.58D * ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) (type: double), (- ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) (type: double), _col4 (type: bigint), ((2563.58D * ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) + -5638.15D) (type: double), ((- ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) * ((2563.58D * ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) + -5638.15D)) (type: double), _col5 (type: double), ((_col1 - ((_col2 * _col2) / _col3)) / _col3) (type: double), (_col0 - (- ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THE
 N (null) ELSE ((_col3 - 1)) END))) (type: double), power(((_col1 - ((_col2 * _col2) / _col3)) / _col3), 0.5) (type: double), (_col0 + ((_col1 - ((_col2 * _col2) / _col3)) / CASE WHEN ((_col3 = 1L)) THEN (null) ELSE ((_col3 - 1)) END)) (type: double), (_col0 * 762.0D) (type: double), _col2 (type: double), (-863.257D % (_col0 * 762.0D)) (type: double)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
@@ -2547,7 +2547,7 @@ STAGE PLANS:
                       native: true
                       projectedOutputColumnNums: [0, 7, 6, 11, 4, 17, 20, 5, 23, 26, 14, 29, 30, 2, 34]
                       selectExpressions: DoubleColDivideLongColumn(col 6:double, col 10:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 7:double)(children: DoubleColDivideLongColumn(col 6:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 6:double) -> 7:double) -> 6:double, IfExprNullCondExpr(col 8:boolean, null, col 9:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 8:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 9:bigint) -> 10:bigint) -> 7:double, DoubleScalarMultiplyDoubleColumn(val 2563.58, col 11:double)(children: DoubleColDivideLongColumn(col 6:double, col 13:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 11:double)(children: DoubleColDivideLongColumn(col 6:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 6:double) -> 11:double) -> 6:double, IfExprNullCondExpr(col 10:boolean, null, col 12:bigint)(children: LongColEqualLongScal
 ar(col 3:bigint, val 1) -> 10:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 12:bigint) -> 13:bigint) -> 11:double) -> 6:double, DoubleColUnaryMinus(col 14:double)(children: DoubleColDivideLongColumn(col 11:double, col 16:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 14:double)(children: DoubleColDivideLongColumn(col 11:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 11:double) -> 14:double) -> 11:double, IfExprNullCondExpr(col 13:boolean, null, col 15:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 13:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 15:bigint) -> 16:bigint) -> 14:double) -> 11:double, DoubleColAddDoubleScalar(col 14:double, val -5638.15)(children: DoubleScalarMultiplyDoubleColumn(val 2563.58, col 17:double)(children: DoubleColDivideLongColumn(col 14:double, col 19:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 17:double)(children: DoubleColDivi
 deLongColumn(col 14:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 14:double) -> 17:double) -> 14:double, IfExprNullCondExpr(col 16:boolean, null, col 18:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 16:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 18:bigint) -> 19:bigint) -> 17:double) -> 14:double) -> 17:double, DoubleColMultiplyDoubleColumn(col 14:double, col 23:double)(children: DoubleColUnaryMinus(col 20:double)(children: DoubleColDivideLongColumn(col 14:double, col 22:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 20:double)(children: DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 14:double) -> 20:double) -> 14:double, IfExprNullCondExpr(col 19:boolean, null, col 21:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 19:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 21:bigint) ->
  22:bigint) -> 20:double) -> 14:double, DoubleColAddDoubleScalar(col 20:double, val -5638.15)(children: DoubleScalarMultiplyDoubleColumn(val 2563.58, col 23:double)(children: DoubleColDivideLongColumn(col 20:double, col 25:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 23:double)(children: DoubleColDivideLongColumn(col 20:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 20:double) -> 23:double) -> 20:double, IfExprNullCondExpr(col 22:boolean, null, col 24:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 22:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 24:bigint) -> 25:bigint) -> 23:double) -> 20:double) -> 23:double) -> 20:double, DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 23:double)(children: DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 14:dou
 ble) -> 23:double) -> 14:double) -> 23:double, DoubleColSubtractDoubleColumn(col 0:double, col 14:double)(children: DoubleColUnaryMinus(col 26:double)(children: DoubleColDivideLongColumn(col 14:double, col 28:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 26:double)(children: DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 14:double) -> 26:double) -> 14:double, IfExprNullCondExpr(col 25:boolean, null, col 27:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 25:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 27:bigint) -> 28:bigint) -> 26:double) -> 14:double) -> 26:double, FuncPowerDoubleToDouble(col 29:double)(children: DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 29:double)(children: DoubleColDivideLongColumn(col 14:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, co
 l 2:double) -> 14:double) -> 29:double) -> 14:double) -> 29:double) -> 14:double, DoubleColAddDoubleColumn(col 0:double, col 30:double)(children: DoubleColDivideLongColumn(col 29:double, col 32:bigint)(children: DoubleColSubtractDoubleColumn(col 1:double, col 30:double)(children: DoubleColDivideLongColumn(col 29:double, col 3:bigint)(children: DoubleColMultiplyDoubleColumn(col 2:double, col 2:double) -> 29:double) -> 30:double) -> 29:double, IfExprNullCondExpr(col 28:boolean, null, col 31:bigint)(children: LongColEqualLongScalar(col 3:bigint, val 1) -> 28:boolean, LongColSubtractLongScalar(col 3:bigint, val 1) -> 31:bigint) -> 32:bigint) -> 30:double) -> 29:double, DoubleColMultiplyDoubleScalar(col 0:double, val 762.0) -> 30:double, DoubleScalarModuloDoubleColumn(val -863.257, col 33:double)(children: DoubleColMultiplyDoubleScalar(col 0:double, val 762.0) -> 33:double) -> 34:double
-                  Statistics: Num rows: 1127 Data size: 141984 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1251 Data size: 157600 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: double)
                     sort order: +
@@ -2555,7 +2555,7 @@ STAGE PLANS:
                         className: VectorReduceSinkObjectHashOperator
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                    Statistics: Num rows: 1127 Data size: 141984 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1251 Data size: 157600 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: double), _col14 (type: double)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -2573,13 +2573,13 @@ STAGE PLANS:
                     className: VectorSelectOperator
                     native: true
                     projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 13]
-                Statistics: Num rows: 1127 Data size: 141984 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 1251 Data size: 157600 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
                   File Sink Vectorization:
                       className: VectorFileSinkOperator
                       native: false
-                  Statistics: Num rows: 1127 Data size: 141984 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1251 Data size: 157600 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorized_context.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_context.q.out b/ql/src/test/results/clientpositive/llap/vectorized_context.q.out
index 6570f74..0e0108a 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_context.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_context.q.out
@@ -134,7 +134,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col2
                         input vertices:
                           1 Map 2
-                        Statistics: Num rows: 6062 Data size: 48288 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 6075 Data size: 48444 Basic stats: COMPLETE Column stats: COMPLETE
                         Map Join Operator
                           condition map:
                                Inner Join 0 to 1
@@ -144,17 +144,17 @@ STAGE PLANS:
                           outputColumnNames: _col2, _col5
                           input vertices:
                             1 Map 3
-                          Statistics: Num rows: 6049 Data size: 612818 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 6075 Data size: 615574 Basic stats: COMPLETE Column stats: COMPLETE
                           Select Operator
                             expressions: _col5 (type: string), _col2 (type: double)
                             outputColumnNames: _col0, _col1
-                            Statistics: Num rows: 6049 Data size: 612818 Basic stats: COMPLETE Column stats: COMPLETE
+                            Statistics: Num rows: 6075 Data size: 615574 Basic stats: COMPLETE Column stats: COMPLETE
                             Limit
                               Number of rows: 100
-                              Statistics: Num rows: 100 Data size: 10200 Basic stats: COMPLETE Column stats: COMPLETE
+                              Statistics: Num rows: 100 Data size: 10208 Basic stats: COMPLETE Column stats: COMPLETE
                               File Output Operator
                                 compressed: false
-                                Statistics: Num rows: 100 Data size: 10200 Basic stats: COMPLETE Column stats: COMPLETE
+                                Statistics: Num rows: 100 Data size: 10208 Basic stats: COMPLETE Column stats: COMPLETE
                                 table:
                                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out
index 97e8be6..c718ad0 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1
                         input vertices:
                           1 Map 3
-                        Statistics: Num rows: 18464 Data size: 129120 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 13785 Data size: 91688 Basic stats: COMPLETE Column stats: COMPLETE
                         Select Operator
                           expressions: _col0 (type: int), _col1 (type: int), (_col0 + _col1) (type: int)
                           outputColumnNames: _col0, _col1, _col2
@@ -68,7 +68,7 @@ STAGE PLANS:
                               native: true
                               projectedOutputColumnNums: [2, 2, 13]
                               selectExpressions: LongColAddLongColumn(col 2:int, col 2:int) -> 13:int
-                          Statistics: Num rows: 18464 Data size: 129120 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 13785 Data size: 91688 Basic stats: COMPLETE Column stats: COMPLETE
                           Group By Operator
                             aggregations: count(_col0), max(_col1), min(_col0), sum(_col2), count(_col2)
                             Group By Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out
index 27bab20..1bfe5af 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_nested_mapjoin.q.out
@@ -41,11 +41,11 @@ STAGE PLANS:
                         outputColumnNames: _col2, _col3
                         input vertices:
                           1 Map 3
-                        Statistics: Num rows: 640986 Data size: 7671024 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 483205 Data size: 5777652 Basic stats: COMPLETE Column stats: COMPLETE
                         Select Operator
                           expressions: _col2 (type: smallint), _col3 (type: double)
                           outputColumnNames: _col0, _col1
-                          Statistics: Num rows: 640986 Data size: 7671024 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 483205 Data size: 5777652 Basic stats: COMPLETE Column stats: COMPLETE
                           Map Join Operator
                             condition map:
                                  Inner Join 0 to 1
@@ -55,7 +55,7 @@ STAGE PLANS:
                             outputColumnNames: _col1
                             input vertices:
                               1 Map 4
-                            Statistics: Num rows: 1389838 Data size: 11104832 Basic stats: COMPLETE Column stats: COMPLETE
+                            Statistics: Num rows: 782372 Data size: 6245104 Basic stats: COMPLETE Column stats: COMPLETE
                             Group By Operator
                               aggregations: sum(_col1)
                               mode: hash

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out b/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
index 597c432..8aea10a 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out
@@ -120,11 +120,11 @@ STAGE PLANS:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 18464 Data size: 129120 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 13785 Data size: 91688 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), (_col0 + _col1) (type: int)
                   outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 18464 Data size: 129120 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 13785 Data size: 91688 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: count(_col0), max(_col1), min(_col0), sum(_col2), count(_col2)
                     mode: hash

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/llap/windowing_gby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/windowing_gby.q.out b/ql/src/test/results/clientpositive/llap/windowing_gby.q.out
index eef7562..4fa1fb9 100644
--- a/ql/src/test/results/clientpositive/llap/windowing_gby.q.out
+++ b/ql/src/test/results/clientpositive/llap/windowing_gby.q.out
@@ -41,7 +41,7 @@ Stage-0
                       PartitionCols:_col0
                       Group By Operator [GBY_10] (rows=3 width=20)
                         Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)","sum(_col1)"],keys:_col2
-                        Merge Join Operator [MERGEJOIN_34] (rows=36 width=7)
+                        Merge Join Operator [MERGEJOIN_34] (rows=27 width=7)
                           Conds:RS_37._col0=RS_40._col1(Inner),Output:["_col1","_col2","_col3"]
                         <-Map 1 [SIMPLE_EDGE] vectorized, llap
                           SHUFFLE [RS_37]

http://git-wip-us.apache.org/repos/asf/hive/blob/5c02fee2/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out b/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
index c5d0d63..736321b 100644
--- a/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
@@ -867,7 +867,7 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_10]
-        Join Operator [JOIN_8] (rows=18 width=85)
+        Join Operator [JOIN_8] (rows=6 width=85)
           Output:["_col0"],condition map:[{"":"{\"type\":\"Inner\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0","1":"_col0"}
         <-Map 1 [PARTITION-LEVEL SORT]
           PARTITION-LEVEL SORT [RS_6]
@@ -1730,9 +1730,9 @@ Stage-0
     Stage-1
       Reducer 3
       File Output Operator [FS_18]
-        Select Operator [SEL_17] (rows=185 width=178)
+        Select Operator [SEL_17] (rows=167 width=178)
           Output:["_col0","_col1"]
-          Filter Operator [FIL_16] (rows=185 width=179)
+          Filter Operator [FIL_16] (rows=167 width=179)
             predicate:_col4 is null
             Join Operator [JOIN_15] (rows=250 width=179)
               Output:["_col0","_col1","_col4"],condition map:[{"":"{\"type\":\"Left Outer\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0, _col1","1":"_col0, _col1"}
@@ -1806,7 +1806,7 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_12]
-        Join Operator [JOIN_10] (rows=131 width=178)
+        Join Operator [JOIN_10] (rows=83 width=178)
           Output:["_col0","_col1"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0, _col1","1":"_col0, _col1"}
         <-Map 1 [PARTITION-LEVEL SORT]
           PARTITION-LEVEL SORT [RS_8]
@@ -1858,7 +1858,7 @@ Stage-0
     Stage-1
       Reducer 2
       File Output Operator [FS_12]
-        Join Operator [JOIN_10] (rows=131 width=178)
+        Join Operator [JOIN_10] (rows=83 width=178)
           Output:["_col0","_col1"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0, _col1","1":"_col0, _col1"}
         <-Map 1 [PARTITION-LEVEL SORT]
           PARTITION-LEVEL SORT [RS_8]
@@ -1946,25 +1946,25 @@ Stage-0
     Stage-1
       Reducer 4
       File Output Operator [FS_22]
-        Select Operator [SEL_21] (rows=7 width=8)
+        Select Operator [SEL_21] (rows=3 width=8)
           Output:["_col0","_col1"]
-          Join Operator [JOIN_20] (rows=7 width=8)
+          Join Operator [JOIN_20] (rows=3 width=8)
             Output:["_col0","_col3"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col1, _col4","1":"_col0, _col1"}
           <-Map 6 [PARTITION-LEVEL SORT]
             PARTITION-LEVEL SORT [RS_19]
               PartitionCols:_col0, _col1
-              Group By Operator [GBY_17] (rows=4 width=8)
+              Group By Operator [GBY_17] (rows=1 width=8)
                 Output:["_col0","_col1"],keys:_col0, _col1
-                Select Operator [SEL_12] (rows=14 width=8)
+                Select Operator [SEL_12] (rows=2 width=8)
                   Output:["_col0","_col1"]
-                  Filter Operator [FIL_25] (rows=14 width=96)
+                  Filter Operator [FIL_25] (rows=2 width=96)
                     predicate:((l_linenumber = 1) and (l_shipmode = 'AIR') and l_orderkey is not null)
                     TableScan [TS_10] (rows=100 width=96)
                       default@lineitem,lineitem,Tbl:COMPLETE,Col:COMPLETE,Output:["l_orderkey","l_linenumber","l_shipmode"]
           <-Reducer 3 [PARTITION-LEVEL SORT]
             PARTITION-LEVEL SORT [RS_18]
               PartitionCols:_col1, _col4
-              Join Operator [JOIN_15] (rows=7 width=16)
+              Join Operator [JOIN_15] (rows=14 width=16)
                 Output:["_col0","_col1","_col3","_col4"],condition map:[{"":"{\"type\":\"Inner\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0","1":"_col1"}
               <-Map 5 [PARTITION-LEVEL SORT]
                 PARTITION-LEVEL SORT [RS_14]
@@ -2016,7 +2016,7 @@ Stage-0
     Stage-1
       Reducer 4
       File Output Operator [FS_31]
-        Join Operator [JOIN_29] (rows=32 width=186)
+        Join Operator [JOIN_29] (rows=41 width=186)
           Output:["_col0","_col1","_col2"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col2","1":"_col0"}
         <-Reducer 3 [PARTITION-LEVEL SORT]
           PARTITION-LEVEL SORT [RS_27]
@@ -2100,7 +2100,7 @@ Stage-0
     Stage-1
       Reducer 3
       File Output Operator [FS_21]
-        Join Operator [JOIN_19] (rows=6 width=227)
+        Join Operator [JOIN_19] (rows=13 width=227)
           Output:["_col0","_col1","_col2"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col1","1":"_col0"}
         <-Reducer 2 [PARTITION-LEVEL SORT]
           PARTITION-LEVEL SORT [RS_17]
@@ -2807,16 +2807,16 @@ Stage-0
           GROUP [RS_18]
             Group By Operator [GBY_17] (rows=1 width=16)
               Output:["_col0","_col1"],aggregations:["sum(_col0)","sum(_col1)"]
-              Select Operator [SEL_15] (rows=9 width=94)
+              Select Operator [SEL_15] (rows=12 width=94)
                 Output:["_col0","_col1"]
-                Group By Operator [GBY_14] (rows=9 width=94)
+                Group By Operator [GBY_14] (rows=12 width=94)
                   Output:["_col0","_col1"],aggregations:["count(VALUE._col0)"],keys:KEY._col0
                 <-Reducer 2 [GROUP]
                   GROUP [RS_13]
                     PartitionCols:_col0
-                    Group By Operator [GBY_12] (rows=9 width=94)
+                    Group By Operator [GBY_12] (rows=12 width=94)
                       Output:["_col0","_col1"],aggregations:["count()"],keys:_col0
-                      Join Operator [JOIN_10] (rows=19 width=86)
+                      Join Operator [JOIN_10] (rows=25 width=86)
                         Output:["_col0"],condition map:[{"":"{\"type\":\"Left Semi\",\"left\":0,\"right\":1}"}],keys:{"0":"_col0","1":"_col0"}
                       <-Map 1 [PARTITION-LEVEL SORT]
                         PARTITION-LEVEL SORT [RS_8]