You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by vg...@apache.org on 2018/08/20 20:26:08 UTC

[2/2] hive git commit: HIVE-20366: TPC-DS query78 stats estimates are off for is null filter (Vineet Garg, reviewed by Ashutosh Chauhan)

HIVE-20366: TPC-DS query78 stats estimates are off for is null filter (Vineet Garg, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/20baf490
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/20baf490
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/20baf490

Branch: refs/heads/master
Commit: 20baf490cbc109b17da6b6b1c0fd64ca32314e6f
Parents: f280361
Author: Vineet Garg <vg...@apache.org>
Authored: Mon Aug 20 13:25:32 2018 -0700
Committer: Vineet Garg <vg...@apache.org>
Committed: Mon Aug 20 13:25:32 2018 -0700

----------------------------------------------------------------------
 .../stats/annotation/StatsRulesProcFactory.java |  87 ++++++++---
 .../clientpositive/annotate_stats_join.q.out    |   4 +-
 .../llap/bucket_map_join_tez2.q.out             |  16 +-
 .../clientpositive/llap/check_constraint.q.out  |   2 +-
 .../llap/correlationoptimizer1.q.out            |  24 +--
 .../clientpositive/llap/explainuser_1.q.out     |  24 +--
 .../llap/insert_into_default_keyword.q.out      |   2 +-
 .../results/clientpositive/llap/join46.q.out    |  36 ++---
 .../llap/join_emit_interval.q.out               |   4 +-
 .../results/clientpositive/llap/mapjoin46.q.out |  36 ++---
 .../llap/mapjoin_emit_interval.q.out            |   4 +-
 .../clientpositive/llap/subquery_in.q.out       |  28 ++--
 .../clientpositive/llap/subquery_multi.q.out    |  10 +-
 .../clientpositive/llap/subquery_notin.q.out    | 150 +++++++++----------
 .../clientpositive/llap/subquery_scalar.q.out   |   8 +-
 .../clientpositive/llap/subquery_select.q.out   |  46 +++---
 .../clientpositive/llap/tez_join_tests.q.out    |  12 +-
 .../clientpositive/llap/tez_joins_explain.q.out |  12 +-
 .../clientpositive/llap/unionDistinct_1.q.out   |  18 +--
 .../clientpositive/llap/vector_coalesce_3.q.out |   2 +-
 .../llap/vector_groupby_mapjoin.q.out           |   4 +-
 .../llap/vector_outer_join0.q.out               |   8 +-
 .../clientpositive/llap/vectorized_join46.q.out |  24 +--
 .../spark/annotate_stats_join.q.out             |   4 +-
 .../spark/spark_explainuser_1.q.out             |  24 +--
 25 files changed, 317 insertions(+), 272 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
index 7682791..9cd6812 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
@@ -1696,12 +1696,17 @@ public class StatsRulesProcFactory {
         }
 
         List<Long> distinctVals = Lists.newArrayList();
+
+        // these ndvs are later used to compute unmatched rows and num of nulls for outer joins
+        List<Long> ndvsUnmatched= Lists.newArrayList();
         long denom = 1;
+        long denomUnmatched = 1;
         if (inferredRowCount == -1) {
           // failed to infer PK-FK relationship for row count estimation fall-back on default logic
           // compute denominator  max(V(R,y1), V(S,y1)) * max(V(R,y2), V(S,y2))
           // in case of multi-attribute join
           List<Long> perAttrDVs = Lists.newArrayList();
+          // go over each predicate
           for (int idx = 0; idx < numAttr; idx++) {
             for (Integer i : joinKeys.keySet()) {
               String col = joinKeys.get(i).get(idx);
@@ -1711,19 +1716,27 @@ public class StatsRulesProcFactory {
               }
             }
             distinctVals.add(getDenominator(perAttrDVs));
+            ndvsUnmatched.add(getDenominatorForUnmatchedRows(perAttrDVs));
             perAttrDVs.clear();
           }
 
           if (numAttr > 1 && conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_CORRELATED_MULTI_KEY_JOINS)) {
             denom = Collections.max(distinctVals);
+            denomUnmatched = denom - ndvsUnmatched.get(distinctVals.indexOf(denom));
           } else if (numAttr > numParent) {
             // To avoid denominator getting larger and aggressively reducing
             // number of rows, we will ease out denominator.
             denom = StatsUtils.addWithExpDecay(distinctVals);
+            denomUnmatched = denom - StatsUtils.addWithExpDecay(ndvsUnmatched);
           } else {
             for (Long l : distinctVals) {
               denom = StatsUtils.safeMult(denom, l);
             }
+            long tempDenom = 1;
+            for (Long l : ndvsUnmatched) {
+              tempDenom = StatsUtils.safeMult(tempDenom, l);
+            }
+            denomUnmatched = denom - tempDenom;
           }
         }
 
@@ -1754,15 +1767,22 @@ public class StatsRulesProcFactory {
         // update join statistics
         stats.setColumnStats(outColStats);
 
-        // reason we compute interim row count, where join type isn't considered, is because later
-        // it will be used to estimate num nulls
         long interimRowCount = inferredRowCount != -1 ? inferredRowCount
           : computeRowCountAssumingInnerJoin(rowCounts, denom, jop);
         // final row computation will consider join type
         long joinRowCount = inferredRowCount != -1 ? inferredRowCount
           : computeFinalRowCount(rowCounts, interimRowCount, jop);
 
-        updateColStats(conf, stats, interimRowCount, joinRowCount, jop, rowCountParents);
+        // The idea is to measure unmatched rows in outer joins by figuring out how many rows didn't match.
+        // Unmatched rows are estimated using denomUnmatched, which is the denominator used for computing
+        // the join cardinality minus the NDV which wasn't used. This number (unmatched rows) is then subtracted
+        // from the join cardinality to get the rows which didn't match.
+        long unMatchedRows = Math.abs(computeRowCountAssumingInnerJoin(rowCounts, denomUnmatched, jop) - joinRowCount);
+        if(denomUnmatched == 0) {
+          // if unmatched denominator is zero we take it as all rows will match
+          unMatchedRows = 0;
+        }
+        updateColStats(conf, stats, unMatchedRows, joinRowCount, jop, rowCountParents);
 
         // evaluate filter expression and update statistics
         if (joinRowCount != -1 && jop.getConf().getNoOuterJoin() &&
@@ -2159,7 +2179,7 @@ public class StatsRulesProcFactory {
       return false;
     }
 
-    private void updateNumNulls(ColStatistics colStats, long interimNumRows, long newNumRows,
+    private void updateNumNulls(ColStatistics colStats, long unmatchedRows, long newNumRows,
         long pos, CommonJoinOperator<? extends JoinDesc> jop) {
 
       if (!(jop.getConf().getConds().length == 1)) {
@@ -2175,39 +2195,31 @@ public class StatsRulesProcFactory {
       case JoinDesc.LEFT_OUTER_JOIN:
         //if this column is coming from right input only then we update num nulls
         if (pos == joinCond.getRight()
-            && interimNumRows != newNumRows) {
-          // interim row count can not be less due to containment
-          // assumption in join cardinality computation
-          assert (newNumRows > interimNumRows);
+            && unmatchedRows != newNumRows) {
           if (isJoinKey(colStats.getColumnName(), jop.getConf().getJoinKeys())) {
-            newNumNulls = Math.min(newNumRows, (newNumRows - interimNumRows));
+            newNumNulls = Math.min(newNumRows, (unmatchedRows));
           } else {
-            newNumNulls = Math.min(newNumRows, oldNumNulls + (newNumRows - interimNumRows));
+            newNumNulls = Math.min(newNumRows, oldNumNulls + (unmatchedRows));
           }
         }
         break;
       case JoinDesc.RIGHT_OUTER_JOIN:
         if (pos == joinCond.getLeft()
-            && interimNumRows != newNumRows) {
-
-          // interim row count can not be less due to containment
-          // assumption in join cardinality computation
-          // interimNumRows represent number of matches for join keys on two sides.
-          // newNumRows-interimNumRows represent number of non-matches.
-          assert (newNumRows > interimNumRows);
+            && unmatchedRows != newNumRows) {
 
           if (isJoinKey(colStats.getColumnName(), jop.getConf().getJoinKeys())) {
-            newNumNulls = Math.min(newNumRows, (newNumRows - interimNumRows));
+            newNumNulls = Math.min(newNumRows, ( unmatchedRows));
           } else {
-            newNumNulls = Math.min(newNumRows, oldNumNulls + (newNumRows - interimNumRows));
+            // TODO: oldNumNulls should be scaled instead of taken as it is
+            newNumNulls = Math.min(newNumRows, oldNumNulls + (unmatchedRows));
           }
         }
         break;
       case JoinDesc.FULL_OUTER_JOIN:
         if (isJoinKey(colStats.getColumnName(), jop.getConf().getJoinKeys())) {
-          newNumNulls = Math.min(newNumRows, (newNumRows - interimNumRows));
+          newNumNulls = Math.min(newNumRows, (unmatchedRows));
         } else {
-          newNumNulls = Math.min(newNumRows, oldNumNulls + (newNumRows - interimNumRows));
+          newNumNulls = Math.min(newNumRows, oldNumNulls + (unmatchedRows));
         }
         break;
 
@@ -2379,6 +2391,39 @@ public class StatsRulesProcFactory {
       }
     }
 
+    private long getDenominatorForUnmatchedRows(List<Long> distinctVals) {
+
+      if (distinctVals.isEmpty()) {
+        return 2;
+      }
+
+      // simple join from 2 relations: denom = min(v1, v2)
+      if (distinctVals.size() <= 2) {
+        return Collections.min(distinctVals);
+      } else {
+
+        // remember max value and ignore it from the denominator
+        long maxNDV = distinctVals.get(0);
+        int maxIdx = 0;
+
+        for (int i = 1; i < distinctVals.size(); i++) {
+          if (distinctVals.get(i) > maxNDV) {
+            maxNDV = distinctVals.get(i);
+            maxIdx = i;
+          }
+        }
+
+        // join from multiple relations:
+        // denom = Product of all NDVs except the greatest of all
+        long denom = 1;
+        for (int i = 0; i < distinctVals.size(); i++) {
+          if (i != maxIdx) {
+            denom = StatsUtils.safeMult(denom, distinctVals.get(i));
+          }
+        }
+        return denom;
+      }
+    }
     private long getDenominator(List<Long> distinctVals) {
 
       if (distinctVals.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/annotate_stats_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/annotate_stats_join.q.out b/ql/src/test/results/clientpositive/annotate_stats_join.q.out
index b0d2b05..c2bf2e5 100644
--- a/ql/src/test/results/clientpositive/annotate_stats_join.q.out
+++ b/ql/src/test/results/clientpositive/annotate_stats_join.q.out
@@ -898,10 +898,10 @@ STAGE PLANS:
             0 _col0 (type: string), _col1 (type: int)
             1 _col1 (type: string), _col0 (type: int)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 54 Data size: 9506 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 54 Data size: 10476 Basic stats: COMPLETE Column stats: COMPLETE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 54 Data size: 9506 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 54 Data size: 10476 Basic stats: COMPLETE Column stats: COMPLETE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
index 205cd44..4f042ce 100644
--- a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
@@ -856,10 +856,10 @@ STAGE PLANS:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -911,10 +911,10 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1
                         input vertices:
                           1 Map 2
-                        Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator
                           compressed: false
-                          Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1016,10 +1016,10 @@ STAGE PLANS:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1091,10 +1091,10 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1
                         input vertices:
                           0 Map 1
-                        Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator
                           compressed: false
-                          Statistics: Num rows: 166 Data size: 1328 Basic stats: COMPLETE Column stats: COMPLETE
+                          Statistics: Num rows: 166 Data size: 668 Basic stats: COMPLETE Column stats: COMPLETE
                           table:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/check_constraint.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/check_constraint.q.out b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
index ec1ed64..be1084b 100644
--- a/ql/src/test/results/clientpositive/llap/check_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
@@ -1537,7 +1537,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 262 Data size: 69430 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 262 Data size: 46723 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
                   expressions: UDFToInteger(_col0) (type: int), CAST( _col2 AS decimal(5,2)) (type: decimal(5,2)), _col1 (type: string)
                   outputColumnNames: _col0, _col1, _col2

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out b/ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out
index 21b07b2..0edeef9 100644
--- a/ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out
+++ b/ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out
@@ -1104,18 +1104,18 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col1
-                Statistics: Num rows: 39 Data size: 3393 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 39 Data size: 3306 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col1 (type: string)
                   mode: hash
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 3 Data size: 285 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: string)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 3 Data size: 285 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -1246,18 +1246,18 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col1
-                Statistics: Num rows: 39 Data size: 3393 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 39 Data size: 3306 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col1 (type: string)
                   mode: hash
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 3 Data size: 285 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: string)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 3 Data size: 285 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
         Reducer 3 
             Execution mode: vectorized, llap
@@ -1385,7 +1385,7 @@ STAGE PLANS:
                   0 _col0 (type: string), _col1 (type: string)
                   1 _col0 (type: string), _col1 (type: string)
                 outputColumnNames: _col0, _col3
-                Statistics: Num rows: 39 Data size: 6903 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 39 Data size: 6812 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string), _col3 (type: string)
@@ -1516,7 +1516,7 @@ STAGE PLANS:
                   0 _col0 (type: string), _col1 (type: string)
                   1 _col0 (type: string), _col1 (type: string)
                 outputColumnNames: _col0, _col3
-                Statistics: Num rows: 39 Data size: 6903 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 39 Data size: 6812 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string), _col3 (type: string)
@@ -1934,7 +1934,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 3440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string)
@@ -2076,7 +2076,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 3440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string)
@@ -2218,7 +2218,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 525 Data size: 3440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 525 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string)
@@ -2360,7 +2360,7 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 525 Data size: 3440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 525 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
                   aggregations: count()
                   keys: _col0 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index a981916..d99b370 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -957,9 +957,9 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=80 width=7)
+        Select Operator [SEL_7] (rows=80 width=4)
           Output:["_col0","_col1"]
-          Merge Join Operator [MERGEJOIN_16] (rows=80 width=7)
+          Merge Join Operator [MERGEJOIN_16] (rows=80 width=4)
             Conds:RS_4._col0=RS_5._col0(Left Outer),Output:["_col1","_col3"]
           <-Map 1 [SIMPLE_EDGE] llap
             SHUFFLE [RS_4]
@@ -991,9 +991,9 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_8]
-        Select Operator [SEL_7] (rows=80 width=7)
+        Select Operator [SEL_7] (rows=80 width=0)
           Output:["_col0","_col1"]
-          Merge Join Operator [MERGEJOIN_9] (rows=80 width=7)
+          Merge Join Operator [MERGEJOIN_9] (rows=80 width=0)
             Conds:RS_4._col0=RS_5._col0(Outer),Output:["_col1","_col3"]
           <-Map 1 [SIMPLE_EDGE] llap
             SHUFFLE [RS_4]
@@ -1715,9 +1715,9 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_19]
-        Select Operator [SEL_18] (rows=365 width=178)
+        Select Operator [SEL_18] (rows=315 width=178)
           Output:["_col0","_col1"]
-          Filter Operator [FIL_17] (rows=365 width=179)
+          Filter Operator [FIL_17] (rows=315 width=179)
             predicate:_col3 is null
             Merge Join Operator [MERGEJOIN_27] (rows=500 width=179)
               Conds:RS_14._col1=RS_15._col0(Left Outer),Output:["_col0","_col1","_col3"]
@@ -1777,11 +1777,11 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_18]
-        Select Operator [SEL_17] (rows=167 width=178)
+        Select Operator [SEL_17] (rows=126 width=178)
           Output:["_col0","_col1"]
-          Filter Operator [FIL_16] (rows=167 width=179)
+          Filter Operator [FIL_16] (rows=126 width=180)
             predicate:_col4 is null
-            Merge Join Operator [MERGEJOIN_31] (rows=250 width=179)
+            Merge Join Operator [MERGEJOIN_31] (rows=250 width=180)
               Conds:GBY_4._col0, _col1=SEL_12._col0, _col1(Left Outer),Output:["_col0","_col1","_col4"]
             <-Select Operator [SEL_12] (rows=83 width=182)
                 Output:["_col0","_col1","_col2"]
@@ -2287,14 +2287,14 @@ Stage-0
       File Output Operator [FS_24]
         Select Operator [SEL_23] (rows=13 width=223)
           Output:["_col0","_col1","_col2"]
-          Filter Operator [FIL_22] (rows=13 width=237)
+          Filter Operator [FIL_22] (rows=13 width=225)
             predicate:CASE WHEN ((_col4 = 0L)) THEN (true) WHEN (_col4 is null) THEN (true) WHEN (_col8 is not null) THEN (false) WHEN (_col0 is null) THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END
-            Merge Join Operator [MERGEJOIN_45] (rows=26 width=236)
+            Merge Join Operator [MERGEJOIN_45] (rows=26 width=224)
               Conds:RS_19._col0, _col1=RS_20._col0, _col1(Left Outer),Output:["_col0","_col1","_col2","_col4","_col5","_col8"]
             <-Reducer 2 [SIMPLE_EDGE] llap
               SHUFFLE [RS_19]
                 PartitionCols:_col0, _col1
-                Merge Join Operator [MERGEJOIN_44] (rows=26 width=235)
+                Merge Join Operator [MERGEJOIN_44] (rows=26 width=223)
                   Conds:RS_16._col1=RS_17._col0(Left Outer),Output:["_col0","_col1","_col2","_col4","_col5"]
                 <-Map 1 [SIMPLE_EDGE] llap
                   SHUFFLE [RS_16]

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out b/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
index 1a61c0e..f604443 100644
--- a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
@@ -2601,7 +2601,7 @@ STAGE PLANS:
                   0 key (type: int)
                   1 key (type: int)
                 outputColumnNames: _col0, _col2, _col5, _col6, _col7
-                Statistics: Num rows: 2 Data size: 432 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 2 Data size: 522 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: ((_col0 = _col6) and (_col6 < 3)) (type: boolean)
                   Statistics: Num rows: 1 Data size: 261 Basic stats: COMPLETE Column stats: COMPLETE

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/join46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join46.q.out b/ql/src/test/results/clientpositive/llap/join46.q.out
index b6ef9b1..07c4a62 100644
--- a/ql/src/test/results/clientpositive/llap/join46.q.out
+++ b/ql/src/test/results/clientpositive/llap/join46.q.out
@@ -106,10 +106,10 @@ STAGE PLANS:
                   0 _col1 (type: int)
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -220,10 +220,10 @@ STAGE PLANS:
                   0 _col1 (type: int)
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 6 Data size: 857 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 6 Data size: 857 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -435,10 +435,10 @@ STAGE PLANS:
                   0 _col1 (type: int)
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -981,10 +981,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1533,10 +1533,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1976,10 +1976,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 10 Data size: 1711 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 10 Data size: 1711 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2138,10 +2138,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
         Reducer 3 
             Execution mode: llap
@@ -2154,10 +2154,10 @@ STAGE PLANS:
                   1 
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
                 residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
-                Statistics: Num rows: 64 Data size: 24440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 64 Data size: 24440 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2173,10 +2173,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
 
   Stage: Stage-0

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/join_emit_interval.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join_emit_interval.q.out b/ql/src/test/results/clientpositive/llap/join_emit_interval.q.out
index 9484b7a..b5d5bd7 100644
--- a/ql/src/test/results/clientpositive/llap/join_emit_interval.q.out
+++ b/ql/src/test/results/clientpositive/llap/join_emit_interval.q.out
@@ -109,10 +109,10 @@ STAGE PLANS:
                   0 _col1 (type: int)
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mapjoin46.q.out b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
index 204e775..52eb609 100644
--- a/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
@@ -80,10 +80,10 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -183,10 +183,10 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
-                      Statistics: Num rows: 6 Data size: 857 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 6 Data size: 857 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -407,10 +407,10 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         0 Map 1
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -909,10 +909,10 @@ STAGE PLANS:
                       input vertices:
                         1 Map 2
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1346,10 +1346,10 @@ STAGE PLANS:
                       input vertices:
                         0 Map 1
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1791,10 +1791,10 @@ STAGE PLANS:
                   1 _col1 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 10 Data size: 1711 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 10 Data size: 1711 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1917,10 +1917,10 @@ STAGE PLANS:
                       input vertices:
                         1 Map 4
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
@@ -1943,10 +1943,10 @@ STAGE PLANS:
                       input vertices:
                         0 Map 1
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
@@ -1978,10 +1978,10 @@ STAGE PLANS:
                   1 
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
                 residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
-                Statistics: Num rows: 64 Data size: 24440 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 64 Data size: 24440 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/mapjoin_emit_interval.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mapjoin_emit_interval.q.out b/ql/src/test/results/clientpositive/llap/mapjoin_emit_interval.q.out
index f6a1a6e..b667872 100644
--- a/ql/src/test/results/clientpositive/llap/mapjoin_emit_interval.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapjoin_emit_interval.q.out
@@ -83,10 +83,10 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
-                      Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 8 Data size: 1524 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/subquery_in.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_in.q.out b/ql/src/test/results/clientpositive/llap/subquery_in.q.out
index cb2aa4c..5a34d74 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_in.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_in.q.out
@@ -4391,10 +4391,10 @@ STAGE PLANS:
                   0 _col4 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10
-                Statistics: Num rows: 26 Data size: 16214 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 26 Data size: 16278 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (sq_count_check(CASE WHEN (_col10 is null) THEN (0) ELSE (_col10) END, true) > 0) (type: boolean)
-                  Statistics: Num rows: 8 Data size: 4992 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 5016 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
@@ -4898,10 +4898,10 @@ STAGE PLANS:
                   0 _col4 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10
-                Statistics: Num rows: 26 Data size: 16214 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 26 Data size: 16278 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (sq_count_check(CASE WHEN (_col10 is null) THEN (0) ELSE (_col10) END, true) > 0) (type: boolean)
-                  Statistics: Num rows: 8 Data size: 4992 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 5016 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
@@ -4922,12 +4922,12 @@ STAGE PLANS:
                   0 _col4 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11
-                Statistics: Num rows: 8 Data size: 5080 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 4968 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col4 (type: string), UDFToLong(_col5) (type: bigint)
                   sort order: ++
                   Map-reduce partition columns: _col4 (type: string), UDFToLong(_col5) (type: bigint)
-                  Statistics: Num rows: 8 Data size: 5080 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 4968 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col10 (type: bigint), _col11 (type: bigint)
         Reducer 4 
             Execution mode: llap
@@ -4939,10 +4939,10 @@ STAGE PLANS:
                   0 _col4 (type: string), UDFToLong(_col5) (type: bigint)
                   1 _col1 (type: string), _col0 (type: bigint)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11, _col14
-                Statistics: Num rows: 8 Data size: 5112 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 4972 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: CASE WHEN ((_col10 = 0L)) THEN (true) WHEN (_col10 is null) THEN (true) WHEN (_col14 is not null) THEN (false) WHEN (_col5 is null) THEN (null) WHEN ((_col11 < _col10)) THEN (false) ELSE (true) END (type: boolean)
-                  Statistics: Num rows: 4 Data size: 2556 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 4 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
@@ -5153,10 +5153,10 @@ STAGE PLANS:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10
-                Statistics: Num rows: 26 Data size: 16206 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 26 Data size: 16294 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (sq_count_check(CASE WHEN (_col10 is null) THEN (0) ELSE (_col10) END, true) > 0) (type: boolean)
-                  Statistics: Num rows: 8 Data size: 4992 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 5016 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
@@ -5177,12 +5177,12 @@ STAGE PLANS:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11
-                Statistics: Num rows: 8 Data size: 5080 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 4968 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col0 (type: int), UDFToDouble(_col5) (type: double)
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: int), UDFToDouble(_col5) (type: double)
-                  Statistics: Num rows: 8 Data size: 5080 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 8 Data size: 4968 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col10 (type: bigint), _col11 (type: bigint)
         Reducer 4 
             Execution mode: llap
@@ -5194,10 +5194,10 @@ STAGE PLANS:
                   0 _col0 (type: int), UDFToDouble(_col5) (type: double)
                   1 _col1 (type: int), _col0 (type: double)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11, _col14
-                Statistics: Num rows: 8 Data size: 5112 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 8 Data size: 4972 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: CASE WHEN ((_col10 = 0L)) THEN (true) WHEN (_col10 is null) THEN (true) WHEN (_col14 is not null) THEN (false) WHEN (_col5 is null) THEN (null) WHEN ((_col11 < _col10)) THEN (false) ELSE (true) END (type: boolean)
-                  Statistics: Num rows: 4 Data size: 2556 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 4 Data size: 2496 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8

http://git-wip-us.apache.org/repos/asf/hive/blob/20baf490/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
index a865ee9..0f9e270 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
@@ -4164,10 +4164,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col1, _col2, _col4
-                Statistics: Num rows: 500 Data size: 50028 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 50216 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  Statistics: Num rows: 500 Data size: 50028 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 500 Data size: 50216 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string), _col2 (type: bigint), _col4 (type: boolean)
         Reducer 4 
             Execution mode: llap
@@ -4179,12 +4179,12 @@ STAGE PLANS:
                   0 
                   1 
                 outputColumnNames: _col1, _col2, _col4, _col5
-                Statistics: Num rows: 500 Data size: 52028 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 52216 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: ((_col4 is not null and (_col2 <> 0L)) or _col1 is not null or _col5 is not null) (type: boolean)
-                  Statistics: Num rows: 500 Data size: 52028 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 500 Data size: 52216 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 500 Data size: 52028 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 500 Data size: 52216 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash