Posted to commits@hive.apache.org by we...@apache.org on 2017/05/17 02:52:39 UTC

[01/50] [abbrv] hive git commit: HIVE-16586: Fix Unit test failures when CachedStore is enabled (Daniel Dai, reviewed by Thejas Nair)

Repository: hive
Updated Branches:
  refs/heads/hive-14535 77511070d -> 08edf03f6


HIVE-16586: Fix Unit test failures when CachedStore is enabled (Daniel Dai, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8a7b5b56
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8a7b5b56
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8a7b5b56

Branch: refs/heads/hive-14535
Commit: 8a7b5b565ad1c719afcd565c41523ef0bd1edbfb
Parents: dd2697c
Author: Daniel Dai <da...@hortonworks.com>
Authored: Fri May 12 11:31:42 2017 -0700
Committer: Daniel Dai <da...@hortonworks.com>
Committed: Fri May 12 11:31:42 2017 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/metastore/cache/CacheUtils.java  | 2 +-
 .../org/apache/hadoop/hive/metastore/cache/CachedStore.java | 9 ++++++---
 .../org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java  | 3 ++-
 3 files changed, 9 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8a7b5b56/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
index b438479..668499b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CacheUtils.java
@@ -103,7 +103,7 @@ public class CacheUtils {
     String[] subpatterns = pattern.trim().split("\\|");
     for (String subpattern : subpatterns) {
       subpattern = "(?i)" + subpattern.replaceAll("\\?", ".{1}").replaceAll("\\*", ".*")
-          .replaceAll("\\^", "\\\\^").replaceAll("\\$", "\\\\$");;
+          .replaceAll("\\^", "\\\\^").replaceAll("\\$", "\\\\$");
       if (Pattern.matches(subpattern, HiveStringUtils.normalizeIdentifier(name))) {
         return true;
       }
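
For context, the hunk above converts a metastore-style wildcard pattern ('*' for any
sequence, '?' for a single character, '|' between alternatives) into a case-insensitive
Java regex and tests each alternative against the normalized name; the change itself only
drops a stray double semicolon. Below is a minimal standalone sketch of that conversion.
The class and method names are illustrative and not part of the patch, a plain String
stands in for HiveStringUtils.normalizeIdentifier, and the literal '^'/'$' escaping uses
String.replace rather than the patch's replaceAll calls:

import java.util.regex.Pattern;

public class PatternMatchSketch {

  // Hypothetical helper mirroring the logic above: true if 'name' matches any
  // '|'-separated alternative of the wildcard pattern.
  static boolean matchesPattern(String name, String pattern) {
    for (String subpattern : pattern.trim().split("\\|")) {
      String regex = "(?i)" + subpattern
          .replace("^", "\\^").replace("$", "\\$")  // escape literal anchors
          .replaceAll("\\?", ".{1}")                // '?' -> exactly one character
          .replaceAll("\\*", ".*");                 // '*' -> any character sequence
      if (Pattern.matches(regex, name)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    System.out.println(matchesPattern("srcpart", "src*"));        // true
    System.out.println(matchesPattern("SRCPART", "foo|s?cpart")); // true, case-insensitive
    System.out.println(matchesPattern("srcpart", "part*"));       // false, whole name must match
  }
}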

http://git-wip-us.apache.org/repos/asf/hive/blob/8a7b5b56/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index a04fd5b..5a187d8 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -514,8 +514,8 @@ public class CachedStore implements RawStore, Configurable {
     if (succ) {
       interruptCacheUpdateMaster();
       for (Partition part : parts) {
-        SharedCache.addPartitionToCache(HiveStringUtils.normalizeIdentifier(dbName),
-            HiveStringUtils.normalizeIdentifier(tblName), part);
+        SharedCache.addPartitionToCache(HiveStringUtils.normalizeIdentifier(part.getDbName()),
+            HiveStringUtils.normalizeIdentifier(part.getTableName()), part);
       }
     }
     return succ;
@@ -545,6 +545,8 @@ public class CachedStore implements RawStore, Configurable {
         HiveStringUtils.normalizeIdentifier(tableName), part_vals);
     if (part != null) {
       part.unsetPrivileges();
+    } else {
+      throw new NoSuchObjectException();
     }
     return part;
   }
@@ -782,6 +784,7 @@ public class CachedStore implements RawStore, Configurable {
     for (String partName : partNames) {
       Partition part = SharedCache.getPartitionFromCache(HiveStringUtils.normalizeIdentifier(dbName),
           HiveStringUtils.normalizeIdentifier(tblName), partNameToVals(partName));
+      part.unsetPrivileges();
       result.add(part);
     }
     return hasUnknownPartitions;
@@ -1046,7 +1049,7 @@ public class CachedStore implements RawStore, Configurable {
         }
       }
       if (!psMatch) {
-        break;
+        continue;
       }
       if (maxParts == -1 || count < maxParts) {
         partNames.add(Warehouse.makePartName(t.getPartitionKeys(), part.getValues()));
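
For context on the last hunk: when partitions are listed by a partial partition spec, a
cached partition whose values do not match the spec has to be skipped so that the
remaining partitions are still examined; replacing break with continue is what restores
that behavior. Below is a minimal sketch of such a scan under simplifying assumptions.
The helper name filterByPartialSpec and the List<List<String>> representation are
hypothetical; the real loop walks SharedCache entries and builds partition names with
Warehouse.makePartName, as shown above:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PartialSpecScanSketch {

  // Hypothetical helper: keep partitions whose values agree with the partial spec
  // (an empty spec value acts as a wildcard), up to maxParts results (-1 = unlimited).
  static List<List<String>> filterByPartialSpec(List<List<String>> partitions,
      List<String> partialSpec, int maxParts) {
    List<List<String>> result = new ArrayList<>();
    for (List<String> partVals : partitions) {
      boolean psMatch = true;
      for (int i = 0; i < partialSpec.size(); i++) {
        String spec = partialSpec.get(i);
        if (!spec.isEmpty() && !spec.equals(partVals.get(i))) {
          psMatch = false;
          break;             // stop comparing values of this one partition
        }
      }
      if (!psMatch) {
        continue;            // the fix: skip this partition, keep scanning the rest
      }
      if (maxParts == -1 || result.size() < maxParts) {
        result.add(partVals);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    List<List<String>> parts = Arrays.asList(
        Arrays.asList("2017", "01"),
        Arrays.asList("2016", "12"),
        Arrays.asList("2017", "02"));
    // With 'break' instead of 'continue', the non-matching 2016 partition would have
    // ended the scan early and ("2017", "02") would have been dropped from the result.
    System.out.println(filterByPartialSpec(parts, Arrays.asList("2017"), -1));
  }
}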

http://git-wip-us.apache.org/repos/asf/hive/blob/8a7b5b56/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
index 3172f92..7dfa38f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
@@ -653,7 +653,7 @@ public class HBaseUtils {
       }
     }
     if (sd.getBucketCols() != null) {
-      SortedSet<String> bucketCols = new TreeSet<>(sd.getBucketCols());
+      List<String> bucketCols = new ArrayList<>(sd.getBucketCols());
       for (String bucket : bucketCols) md.update(bucket.getBytes(ENCODING));
     }
     if (sd.getSortCols() != null) {
@@ -688,6 +688,7 @@ public class HBaseUtils {
           md.update(e.getValue().getBytes(ENCODING));
         }
       }
+      md.update(sd.isStoredAsSubDirectories() ? "true".getBytes(ENCODING) : "false".getBytes(ENCODING));
     }
 
     return md.digest();
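
For context on the HBaseUtils hunks: the storage descriptor hash folds each field into a
MessageDigest, so bucket columns are now hashed in their declared order (bucketing on
(a, b) is not the same layout as bucketing on (b, a), which a sorted TreeSet would
conflate), and the storedAsSubDirectories flag now participates in the digest so
descriptors differing only in that flag no longer hash identically. Below is a minimal
sketch of that hashing pattern under assumptions: MD5 and UTF-8 are chosen only for
illustration, the method name is hypothetical, and the real code hashes many more fields
using its own ENCODING constant:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;

public class SdHashSketch {

  // Hypothetical, simplified stand-in for the storage-descriptor hashing shown above.
  static byte[] hashSd(List<String> bucketCols, boolean storedAsSubDirectories)
      throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("MD5");
    // Feed bucket columns in their declared order; sorting them first would make
    // differently ordered bucketing specs hash to the same digest.
    for (String col : bucketCols) {
      md.update(col.getBytes(StandardCharsets.UTF_8));
    }
    // Include the flag so two descriptors that differ only here get different digests.
    md.update(storedAsSubDirectories ? "true".getBytes(StandardCharsets.UTF_8)
                                     : "false".getBytes(StandardCharsets.UTF_8));
    return md.digest();
  }

  public static void main(String[] args) throws NoSuchAlgorithmException {
    byte[] d1 = hashSd(Arrays.asList("a", "b"), false);
    byte[] d2 = hashSd(Arrays.asList("b", "a"), false);
    System.out.println(Arrays.equals(d1, d2));  // false: column order is significant
  }
}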


[21/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_select.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_select.q.out b/ql/src/test/results/clientpositive/llap/subquery_select.q.out
index e96067f..7280b05 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_select.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_select.q.out
@@ -16,10 +16,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -35,13 +35,6 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -55,13 +48,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -116,7 +102,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -133,7 +119,7 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -214,10 +200,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -235,13 +221,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -257,13 +236,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -320,7 +292,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -341,7 +313,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -430,10 +402,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -449,13 +421,6 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -469,13 +434,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -530,7 +488,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -547,7 +505,7 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -628,11 +586,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (SIMPLE_EDGE)
         Reducer 8 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -651,33 +609,21 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
+                      aggregations: max(p_size)
                       keys: p_type (type: string)
                       mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: int)
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -695,54 +641,26 @@ STAGE PLANS:
                         value expressions: _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: max(p_size)
                       keys: p_type (type: string)
                       mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: int)
+                        Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: max(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: int), _col0 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: _col0 is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: _col0 (type: int), _col1 (type: string), true (type: boolean)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: string), _col0 (type: int)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: string), _col0 (type: int)
-                        Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -806,27 +724,28 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
+                aggregations: max(VALUE._col0)
                 keys: KEY._col0 (type: string)
                 mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
-                  aggregations: count()
+                  aggregations: count(), count(_col1)
                   keys: _col0 (type: string)
                   mode: complete
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: string)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: bigint)
-        Reducer 8 
+                    Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -835,18 +754,43 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: int), _col0 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: _col0 is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), true (type: boolean)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col0 (type: int)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: string), _col0 (type: int)
+                        Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: boolean)
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
-                  aggregations: count(), count(_col1)
+                  aggregations: count()
                   keys: _col0 (type: string)
                   mode: complete
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: string)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
+                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: bigint)
 
   Stage: Stage-0
     Fetch Operator
@@ -1726,8 +1670,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1743,13 +1687,6 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -1783,7 +1720,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1839,7 +1776,7 @@ POSTHOOK: Input: default@part
 2	46
 46	46
 23	46
-Warning: Shuffle Join MERGEJOIN[30][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 4' is a cross product
+Warning: Shuffle Join MERGEJOIN[30][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
 PREHOOK: query: explain
 select *
 from src b
@@ -1867,10 +1804,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1887,13 +1824,6 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key > '9')) (type: boolean)
                     Statistics: Num rows: 83 Data size: 14774 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1907,7 +1837,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
+        Map 5 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -1945,7 +1875,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1966,7 +1896,7 @@ STAGE PLANS:
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Statistics: Num rows: 36 Data size: 9900 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1979,7 +1909,7 @@ STAGE PLANS:
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                   Statistics: Num rows: 36 Data size: 9900 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1998,7 +1928,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join MERGEJOIN[30][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 4' is a cross product
+Warning: Shuffle Join MERGEJOIN[30][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
 PREHOOK: query: select *
 from src b
 where b.key in
@@ -2048,9 +1978,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2067,13 +1997,6 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key > '9')) (type: boolean)
                     Statistics: Num rows: 83 Data size: 14774 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2086,13 +2009,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col1 (type: string)
                         Statistics: Num rows: 83 Data size: 14774 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: sc
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: value is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2128,7 +2044,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -2153,7 +2069,7 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 41 Data size: 11275 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 6 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2238,9 +2154,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2276,13 +2192,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: tnull
-                  Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     keys: i (type: int)
                     mode: hash
@@ -2346,7 +2255,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2699,8 +2608,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2718,13 +2627,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2764,7 +2666,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2841,8 +2743,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2860,13 +2762,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2906,7 +2801,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2984,10 +2879,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3018,13 +2913,6 @@ STAGE PLANS:
                       key expressions: _col0 (type: int)
                       sort order: +
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: _col0
@@ -3106,7 +2994,7 @@ STAGE PLANS:
                     Reduce Output Operator
                       sort order: 
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -3183,14 +3071,14 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (CUSTOM_SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (CUSTOM_SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE)
+        Reducer 11 <- Map 9 (CUSTOM_SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Reducer 13 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Reducer 11 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
         Reducer 7 <- Map 6 (SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3210,46 +3098,6 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col2 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: p_name (type: string)
-                    outputColumnNames: p_name
-                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: min(p_name)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: p_name (type: string)
-                    outputColumnNames: p_name
-                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: min(p_name)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 6 
             Map Operator Tree:
                 TableScan
@@ -3270,13 +3118,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -3294,7 +3135,40 @@ STAGE PLANS:
                         value expressions: _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Map 9 
+            Map Operator Tree:
+                TableScan
+                  alias: part
+                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: p_name (type: string)
+                    outputColumnNames: p_name
+                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: min(p_name)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string)
+                  Select Operator
+                    expressions: p_name (type: string)
+                    outputColumnNames: p_name
+                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: min(p_name)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3311,7 +3185,7 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 13 
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3421,7 +3295,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3636,7 +3510,7 @@ POSTHOOK: Input: default@part
 45	false
 Warning: Shuffle Join MERGEJOIN[50][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 5' is a cross product
 Warning: Shuffle Join MERGEJOIN[51][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
-Warning: Shuffle Join MERGEJOIN[52][tables = [$hdt$_2, $hdt$_3]] in Stage 'Reducer 11' is a cross product
+Warning: Shuffle Join MERGEJOIN[52][tables = [$hdt$_2, $hdt$_3]] in Stage 'Reducer 9' is a cross product
 PREHOOK: query: explain select p_size, (p_size IN
     (select (select max(p_size) from part) as sb from part order by sb limit 1)) = true
    from part
@@ -3654,15 +3528,15 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (CUSTOM_SIMPLE_EDGE), Reducer 14 (CUSTOM_SIMPLE_EDGE)
-        Reducer 12 <- Reducer 11 (SIMPLE_EDGE)
-        Reducer 14 <- Map 13 (CUSTOM_SIMPLE_EDGE)
+        Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 11 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
         Reducer 7 <- Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 8 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 11 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3678,25 +3552,19 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
+                    expressions: p_size (type: int)
+                    outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 13 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: max(p_size)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int)
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -3712,7 +3580,7 @@ STAGE PLANS:
                         value expressions: _col0 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 11 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -3722,49 +3590,14 @@ STAGE PLANS:
                     Reduce Output Operator
                       sort order: 
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: p_size (type: int)
-                    outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: max(p_size)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: int)
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col1
-                Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: int)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: int)
-                    sort order: +
-                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                    TopN Hash Memory Usage: 0.1
-        Reducer 12 
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -3784,18 +3617,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: boolean)
-        Reducer 14 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: max(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: int)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -3835,6 +3656,18 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: max(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: int)
         Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
@@ -3885,7 +3718,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3897,6 +3730,26 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col1
+                Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: int)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                    TopN Hash Memory Usage: 0.1
 
   Stage: Stage-0
     Fetch Operator
@@ -3906,7 +3759,7 @@ STAGE PLANS:
 
 Warning: Shuffle Join MERGEJOIN[50][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 5' is a cross product
 Warning: Shuffle Join MERGEJOIN[51][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
-Warning: Shuffle Join MERGEJOIN[52][tables = [$hdt$_2, $hdt$_3]] in Stage 'Reducer 11' is a cross product
+Warning: Shuffle Join MERGEJOIN[52][tables = [$hdt$_2, $hdt$_3]] in Stage 'Reducer 9' is a cross product
 PREHOOK: query: select p_size, (p_size IN
     (select (select max(p_size) from part) as sb from part order by sb limit 1)) = true
    from part
@@ -4336,7 +4189,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@part
 #### A masked pattern was here ####
 true
-Warning: Shuffle Join MERGEJOIN[38][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 4' is a cross product
+Warning: Shuffle Join MERGEJOIN[38][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
 PREHOOK: query: explain select o.p_size, (select count(distinct p_type) from part p where p.p_partkey = o.p_partkey) tmp
     FROM part o right join (select * from part where p_size > (select avg(p_size) from part)) t on t.p_partkey = o.p_partkey
 PREHOOK: type: QUERY
@@ -4352,10 +4205,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 7 <- Map 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4373,13 +4226,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_partkey (type: int), p_size (type: int)
                     outputColumnNames: _col0, _col1
@@ -4390,7 +4236,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 4 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -4410,7 +4256,7 @@ STAGE PLANS:
                         value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: p
@@ -4454,7 +4300,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -4477,7 +4323,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 8 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4489,7 +4335,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: double)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4520,7 +4366,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join MERGEJOIN[38][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 4' is a cross product
+Warning: Shuffle Join MERGEJOIN[38][tables = [$hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
 PREHOOK: query: select o.p_size, (select count(distinct p_type) from part p where p.p_partkey = o.p_partkey) tmp
     FROM part o right join (select * from part where p_size > (select avg(p_size) from part)) t on t.p_partkey = o.p_partkey
 PREHOOK: type: QUERY
@@ -4563,14 +4409,14 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (CUSTOM_SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (CUSTOM_SIMPLE_EDGE)
+        Reducer 10 <- Map 6 (CUSTOM_SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Reducer 13 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 8 <- Map 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 6 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4585,7 +4431,7 @@ STAGE PLANS:
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -4595,89 +4441,56 @@ STAGE PLANS:
                     outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: avg(p_size)
+                      aggregations: max(p_size)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int)
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: sum(p_size)
+                      aggregations: min(p_size)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int)
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: max(p_size)
+                      aggregations: avg(p_size)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>)
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: min(p_size)
+                      aggregations: sum(p_size)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: int)
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: double)
-        Reducer 13 
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4768,7 +4581,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4780,6 +4593,18 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: double)
 
   Stage: Stage-0
     Fetch Operator
@@ -5318,15 +5143,15 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Reducer 14 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 11 <- Reducer 10 (CUSTOM_SIMPLE_EDGE)
-        Reducer 14 <- Map 13 (SIMPLE_EDGE)
-        Reducer 16 <- Map 15 (SIMPLE_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 2 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
-        Reducer 9 <- Map 12 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+        Reducer 10 <- Map 7 (SIMPLE_EDGE)
+        Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 10 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5344,9 +5169,44 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
+                  Filter Operator
+                    predicate: p_partkey is not null (type: boolean)
+                    Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: sum(p_size)
+                      keys: p_partkey (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 12 
+        Map 11 
+            Map Operator Tree:
+                TableScan
+                  alias: p
+                  Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (p_type is not null and p_size is not null) (type: boolean)
+                    Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: p_partkey (type: int), p_type (type: string), p_size (type: int)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col2 (type: int)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: string), _col2 (type: int)
+                        Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 13 
             Map Operator Tree:
                 TableScan
                   alias: pp
@@ -5365,10 +5225,10 @@ STAGE PLANS:
                         Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 13 
+        Map 7 
             Map Operator Tree:
                 TableScan
-                  alias: a1
+                  alias: t2
                   Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_partkey is not null (type: boolean)
@@ -5383,18 +5243,11 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 15 
-            Map Operator Tree:
-                TableScan
-                  alias: a1
-                  Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_partkey is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: sum(p_size)
+                      aggregations: count()
                       keys: p_partkey (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1
@@ -5405,13 +5258,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: t2
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_partkey is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
@@ -5427,84 +5273,9 @@ STAGE PLANS:
                         Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: t2
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: p_partkey is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: count()
-                      keys: p_partkey (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (p_type is not null and p_size is not null) (type: boolean)
-                    Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: p_partkey (type: int), p_type (type: string), p_size (type: int)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: string), _col2 (type: int)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: string), _col2 (type: int)
-                        Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                     Inner Join 0 to 2
-                keys:
-                  0 _col0 (type: int)
-                  1 _col0 (type: int)
-                  2 _col0 (type: int)
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: bigint)
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: bigint)
-        Reducer 14 
-            Execution mode: llap
-            Reduce Operator Tree:
               Group By Operator
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
@@ -5528,31 +5299,22 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 16 
+        Reducer 12 
             Execution mode: llap
             Reduce Operator Tree:
-              Group By Operator
-                aggregations: sum(VALUE._col0)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: bigint)
-                  outputColumnNames: _col1, _col2
-                  Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (_col2 > 0) (type: boolean)
-                    Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: _col1 (type: int)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col1 (type: string), _col2 (type: int)
+                  1 _col0 (type: string), _col1 (type: int)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: int)
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -5592,9 +5354,67 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: bigint)
+                  outputColumnNames: _col1, _col2
+                  Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col2 > 0) (type: boolean)
+                    Statistics: Num rows: 4 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                     Inner Join 0 to 2
+                keys:
+                  0 _col0 (type: int)
+                  1 _col0 (type: int)
+                  2 _col0 (type: int)
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: bigint)
+        Reducer 6 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: bigint)
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
               Group By Operator
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
@@ -5618,7 +5438,7 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 4 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 7 
+        Reducer 9 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5637,22 +5457,6 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint)
-        Reducer 9 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 _col1 (type: string), _col2 (type: int)
-                  1 _col0 (type: string), _col1 (type: int)
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
 
   Stage: Stage-0
     Fetch Operator
@@ -5718,8 +5522,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5737,13 +5541,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -5783,7 +5580,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator


[08/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query88.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query88.q.out b/ql/src/test/results/clientpositive/perf/query88.q.out
index fcb4042..f7af4ef 100644
--- a/ql/src/test/results/clientpositive/perf/query88.q.out
+++ b/ql/src/test/results/clientpositive/perf/query88.q.out
@@ -186,39 +186,39 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 15 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 13 <- Map 17 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (CUSTOM_SIMPLE_EDGE)
+Reducer 11 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 12 <- Map 36 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 13 <- Map 37 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
 Reducer 14 <- Reducer 13 (CUSTOM_SIMPLE_EDGE)
-Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 23 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 20 <- Map 24 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
-Reducer 21 <- Map 25 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
+Reducer 15 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 16 <- Map 36 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 17 <- Map 37 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
+Reducer 18 <- Reducer 17 (CUSTOM_SIMPLE_EDGE)
+Reducer 19 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 20 <- Map 36 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 21 <- Map 37 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
 Reducer 22 <- Reducer 21 (CUSTOM_SIMPLE_EDGE)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
-Reducer 28 <- Map 32 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
-Reducer 29 <- Map 33 (SIMPLE_EDGE), Reducer 28 (SIMPLE_EDGE)
-Reducer 3 <- Map 8 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 23 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 24 <- Map 36 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
+Reducer 25 <- Map 37 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 26 <- Reducer 25 (CUSTOM_SIMPLE_EDGE)
+Reducer 27 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 28 <- Map 36 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
+Reducer 29 <- Map 37 (SIMPLE_EDGE), Reducer 28 (SIMPLE_EDGE)
+Reducer 3 <- Map 36 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 30 <- Reducer 29 (CUSTOM_SIMPLE_EDGE)
-Reducer 35 <- Map 34 (SIMPLE_EDGE), Map 39 (SIMPLE_EDGE)
-Reducer 36 <- Map 40 (SIMPLE_EDGE), Reducer 35 (SIMPLE_EDGE)
-Reducer 37 <- Map 41 (SIMPLE_EDGE), Reducer 36 (SIMPLE_EDGE)
-Reducer 38 <- Reducer 37 (CUSTOM_SIMPLE_EDGE)
-Reducer 4 <- Map 9 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 43 <- Map 42 (SIMPLE_EDGE), Map 47 (SIMPLE_EDGE)
-Reducer 44 <- Map 48 (SIMPLE_EDGE), Reducer 43 (SIMPLE_EDGE)
-Reducer 45 <- Map 49 (SIMPLE_EDGE), Reducer 44 (SIMPLE_EDGE)
-Reducer 46 <- Reducer 45 (CUSTOM_SIMPLE_EDGE)
+Reducer 31 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 32 <- Map 36 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
+Reducer 33 <- Map 37 (SIMPLE_EDGE), Reducer 32 (SIMPLE_EDGE)
+Reducer 34 <- Reducer 33 (CUSTOM_SIMPLE_EDGE)
+Reducer 4 <- Map 37 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 51 <- Map 50 (SIMPLE_EDGE), Map 55 (SIMPLE_EDGE)
-Reducer 52 <- Map 56 (SIMPLE_EDGE), Reducer 51 (SIMPLE_EDGE)
-Reducer 53 <- Map 57 (SIMPLE_EDGE), Reducer 52 (SIMPLE_EDGE)
-Reducer 54 <- Reducer 53 (CUSTOM_SIMPLE_EDGE)
-Reducer 59 <- Map 58 (SIMPLE_EDGE), Map 63 (SIMPLE_EDGE)
-Reducer 6 <- Reducer 14 (CUSTOM_SIMPLE_EDGE), Reducer 22 (CUSTOM_SIMPLE_EDGE), Reducer 30 (CUSTOM_SIMPLE_EDGE), Reducer 38 (CUSTOM_SIMPLE_EDGE), Reducer 46 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE), Reducer 54 (CUSTOM_SIMPLE_EDGE), Reducer 62 (CUSTOM_SIMPLE_EDGE)
-Reducer 60 <- Map 64 (SIMPLE_EDGE), Reducer 59 (SIMPLE_EDGE)
-Reducer 61 <- Map 65 (SIMPLE_EDGE), Reducer 60 (SIMPLE_EDGE)
-Reducer 62 <- Reducer 61 (CUSTOM_SIMPLE_EDGE)
+Reducer 6 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 14 (CUSTOM_SIMPLE_EDGE), Reducer 18 (CUSTOM_SIMPLE_EDGE), Reducer 22 (CUSTOM_SIMPLE_EDGE), Reducer 26 (CUSTOM_SIMPLE_EDGE), Reducer 30 (CUSTOM_SIMPLE_EDGE), Reducer 34 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+Reducer 7 <- Map 1 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 8 <- Map 36 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 9 <- Map 37 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -230,452 +230,424 @@ Stage-0
           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
           Merge Join Operator [MERGEJOIN_347] (rows=1 width=65)
             Conds:(Inner),(Inner),(Inner),(Inner),(Inner),(Inner),(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-          <-Reducer 14 [CUSTOM_SIMPLE_EDGE]
+          <-Reducer 10 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_209]
               Group By Operator [GBY_50] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 13 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 9 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_49]
                   Group By Operator [GBY_48] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_328] (rows=766650239 width=88)
                       Conds:RS_44._col2=RS_45._col0(Inner)
-                    <-Map 17 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_45]
                         PartitionCols:_col0
                         Select Operator [SEL_37] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_298] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_35] (rows=1704 width=1910)
+                            TableScan [TS_9] (rows=1704 width=1910)
                               default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 12 [SIMPLE_EDGE]
+                    <-Reducer 8 [SIMPLE_EDGE]
                       SHUFFLE [RS_44]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_327] (rows=696954748 width=88)
                           Conds:RS_41._col1=RS_42._col0(Inner),Output:["_col2"]
-                        <-Map 16 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_42]
                             PartitionCols:_col0
                             Select Operator [SEL_34] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_297] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_32] (rows=7200 width=107)
+                                TableScan [TS_6] (rows=7200 width=107)
                                   default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 11 [SIMPLE_EDGE]
+                        <-Reducer 7 [SIMPLE_EDGE]
                           SHUFFLE [RS_41]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_326] (rows=633595212 width=88)
                               Conds:RS_38._col0=RS_39._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 10 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_38]
                                 PartitionCols:_col0
                                 Select Operator [SEL_28] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_295] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_26] (rows=575995635 width=88)
+                                    TableScan [TS_0] (rows=575995635 width=88)
                                       default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 15 [SIMPLE_EDGE]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_39]
                                 PartitionCols:_col0
                                 Select Operator [SEL_31] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_296] (rows=14400 width=471)
                                     predicate:((t_hour = 12) and (t_minute < 30) and t_time_sk is not null)
-                                    TableScan [TS_29] (rows=86400 width=471)
+                                    TableScan [TS_3] (rows=86400 width=471)
                                       default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 22 [CUSTOM_SIMPLE_EDGE]
+          <-Reducer 14 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_210]
               Group By Operator [GBY_76] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 21 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 13 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_75]
                   Group By Operator [GBY_74] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_331] (rows=766650239 width=88)
                       Conds:RS_70._col2=RS_71._col0(Inner)
-                    <-Map 25 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_71]
                         PartitionCols:_col0
                         Select Operator [SEL_63] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_302] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_61] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 20 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 12 [SIMPLE_EDGE]
                       SHUFFLE [RS_70]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_330] (rows=696954748 width=88)
                           Conds:RS_67._col1=RS_68._col0(Inner),Output:["_col2"]
-                        <-Map 24 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_68]
                             PartitionCols:_col0
                             Select Operator [SEL_60] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_301] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_58] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 19 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 11 [SIMPLE_EDGE]
                           SHUFFLE [RS_67]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_329] (rows=633595212 width=88)
                               Conds:RS_64._col0=RS_65._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 18 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_64]
                                 PartitionCols:_col0
                                 Select Operator [SEL_54] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_299] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_52] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 23 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_65]
                                 PartitionCols:_col0
                                 Select Operator [SEL_57] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_300] (rows=14400 width=471)
                                     predicate:((t_hour = 11) and (t_minute >= 30) and t_time_sk is not null)
-                                    TableScan [TS_55] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 30 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 18 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_211]
               Group By Operator [GBY_102] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 29 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 17 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_101]
                   Group By Operator [GBY_100] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_334] (rows=766650239 width=88)
                       Conds:RS_96._col2=RS_97._col0(Inner)
-                    <-Map 33 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_97]
                         PartitionCols:_col0
                         Select Operator [SEL_89] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_306] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_87] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 28 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 16 [SIMPLE_EDGE]
                       SHUFFLE [RS_96]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_333] (rows=696954748 width=88)
                           Conds:RS_93._col1=RS_94._col0(Inner),Output:["_col2"]
-                        <-Map 32 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_94]
                             PartitionCols:_col0
                             Select Operator [SEL_86] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_305] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_84] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 27 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 15 [SIMPLE_EDGE]
                           SHUFFLE [RS_93]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_332] (rows=633595212 width=88)
                               Conds:RS_90._col0=RS_91._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 26 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_90]
                                 PartitionCols:_col0
                                 Select Operator [SEL_80] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_303] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_78] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 31 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_91]
                                 PartitionCols:_col0
                                 Select Operator [SEL_83] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_304] (rows=14400 width=471)
                                     predicate:((t_hour = 11) and (t_minute < 30) and t_time_sk is not null)
-                                    TableScan [TS_81] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 38 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 22 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_212]
               Group By Operator [GBY_128] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 37 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 21 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_127]
                   Group By Operator [GBY_126] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_337] (rows=766650239 width=88)
                       Conds:RS_122._col2=RS_123._col0(Inner)
-                    <-Map 41 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_123]
                         PartitionCols:_col0
                         Select Operator [SEL_115] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_310] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_113] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 36 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 20 [SIMPLE_EDGE]
                       SHUFFLE [RS_122]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_336] (rows=696954748 width=88)
                           Conds:RS_119._col1=RS_120._col0(Inner),Output:["_col2"]
-                        <-Map 40 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_120]
                             PartitionCols:_col0
                             Select Operator [SEL_112] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_309] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_110] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 35 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 19 [SIMPLE_EDGE]
                           SHUFFLE [RS_119]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_335] (rows=633595212 width=88)
                               Conds:RS_116._col0=RS_117._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 34 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_116]
                                 PartitionCols:_col0
                                 Select Operator [SEL_106] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_307] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_104] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 39 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_117]
                                 PartitionCols:_col0
                                 Select Operator [SEL_109] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_308] (rows=14400 width=471)
                                     predicate:((t_hour = 10) and (t_minute >= 30) and t_time_sk is not null)
-                                    TableScan [TS_107] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 46 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 26 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_213]
               Group By Operator [GBY_154] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 45 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 25 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_153]
                   Group By Operator [GBY_152] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_340] (rows=766650239 width=88)
                       Conds:RS_148._col2=RS_149._col0(Inner)
-                    <-Map 49 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_149]
                         PartitionCols:_col0
                         Select Operator [SEL_141] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_314] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_139] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 44 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 24 [SIMPLE_EDGE]
                       SHUFFLE [RS_148]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_339] (rows=696954748 width=88)
                           Conds:RS_145._col1=RS_146._col0(Inner),Output:["_col2"]
-                        <-Map 48 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_146]
                             PartitionCols:_col0
                             Select Operator [SEL_138] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_313] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_136] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 43 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 23 [SIMPLE_EDGE]
                           SHUFFLE [RS_145]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_338] (rows=633595212 width=88)
                               Conds:RS_142._col0=RS_143._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 42 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_142]
                                 PartitionCols:_col0
                                 Select Operator [SEL_132] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_311] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_130] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 47 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_143]
                                 PartitionCols:_col0
                                 Select Operator [SEL_135] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_312] (rows=14400 width=471)
                                     predicate:((t_hour = 10) and (t_minute < 30) and t_time_sk is not null)
-                                    TableScan [TS_133] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
-            PARTITION_ONLY_SHUFFLE [RS_208]
-              Group By Operator [GBY_24] (rows=1 width=8)
-                Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 4 [CUSTOM_SIMPLE_EDGE]
-                PARTITION_ONLY_SHUFFLE [RS_23]
-                  Group By Operator [GBY_22] (rows=1 width=8)
-                    Output:["_col0"],aggregations:["count()"]
-                    Merge Join Operator [MERGEJOIN_325] (rows=766650239 width=88)
-                      Conds:RS_18._col2=RS_19._col0(Inner)
-                    <-Map 9 [SIMPLE_EDGE]
-                      SHUFFLE [RS_19]
-                        PartitionCols:_col0
-                        Select Operator [SEL_11] (rows=852 width=1910)
-                          Output:["_col0"]
-                          Filter Operator [FIL_294] (rows=852 width=1910)
-                            predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_9] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 3 [SIMPLE_EDGE]
-                      SHUFFLE [RS_18]
-                        PartitionCols:_col2
-                        Merge Join Operator [MERGEJOIN_324] (rows=696954748 width=88)
-                          Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col2"]
-                        <-Map 8 [SIMPLE_EDGE]
-                          SHUFFLE [RS_16]
-                            PartitionCols:_col0
-                            Select Operator [SEL_8] (rows=3600 width=107)
-                              Output:["_col0"]
-                              Filter Operator [FIL_293] (rows=3600 width=107)
-                                predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_6] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 2 [SIMPLE_EDGE]
-                          SHUFFLE [RS_15]
-                            PartitionCols:_col1
-                            Merge Join Operator [MERGEJOIN_323] (rows=633595212 width=88)
-                              Conds:RS_12._col0=RS_13._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 1 [SIMPLE_EDGE]
-                              SHUFFLE [RS_12]
-                                PartitionCols:_col0
-                                Select Operator [SEL_2] (rows=575995635 width=88)
-                                  Output:["_col0","_col1","_col2"]
-                                  Filter Operator [FIL_291] (rows=575995635 width=88)
-                                    predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_0] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 7 [SIMPLE_EDGE]
-                              SHUFFLE [RS_13]
-                                PartitionCols:_col0
-                                Select Operator [SEL_5] (rows=14400 width=471)
-                                  Output:["_col0"]
-                                  Filter Operator [FIL_292] (rows=14400 width=471)
-                                    predicate:((t_hour = 8) and (t_minute >= 30) and t_time_sk is not null)
-                                    TableScan [TS_3] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 54 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 30 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_214]
               Group By Operator [GBY_180] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 53 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 29 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_179]
                   Group By Operator [GBY_178] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_343] (rows=766650239 width=88)
                       Conds:RS_174._col2=RS_175._col0(Inner)
-                    <-Map 57 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_175]
                         PartitionCols:_col0
                         Select Operator [SEL_167] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_318] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_165] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 52 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 28 [SIMPLE_EDGE]
                       SHUFFLE [RS_174]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_342] (rows=696954748 width=88)
                           Conds:RS_171._col1=RS_172._col0(Inner),Output:["_col2"]
-                        <-Map 56 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_172]
                             PartitionCols:_col0
                             Select Operator [SEL_164] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_317] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_162] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 51 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 27 [SIMPLE_EDGE]
                           SHUFFLE [RS_171]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_341] (rows=633595212 width=88)
                               Conds:RS_168._col0=RS_169._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 50 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_168]
                                 PartitionCols:_col0
                                 Select Operator [SEL_158] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_315] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_156] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 55 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_169]
                                 PartitionCols:_col0
                                 Select Operator [SEL_161] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_316] (rows=14400 width=471)
                                     predicate:((t_hour = 9) and (t_minute >= 30) and t_time_sk is not null)
-                                    TableScan [TS_159] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
-          <-Reducer 62 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 34 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_215]
               Group By Operator [GBY_206] (rows=1 width=8)
                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-              <-Reducer 61 [CUSTOM_SIMPLE_EDGE]
+              <-Reducer 33 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_205]
                   Group By Operator [GBY_204] (rows=1 width=8)
                     Output:["_col0"],aggregations:["count()"]
                     Merge Join Operator [MERGEJOIN_346] (rows=766650239 width=88)
                       Conds:RS_200._col2=RS_201._col0(Inner)
-                    <-Map 65 [SIMPLE_EDGE]
+                    <-Map 37 [SIMPLE_EDGE]
                       SHUFFLE [RS_201]
                         PartitionCols:_col0
                         Select Operator [SEL_193] (rows=852 width=1910)
                           Output:["_col0"]
                           Filter Operator [FIL_322] (rows=852 width=1910)
                             predicate:((s_store_name = 'ese') and s_store_sk is not null)
-                            TableScan [TS_191] (rows=1704 width=1910)
-                              default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                    <-Reducer 60 [SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 32 [SIMPLE_EDGE]
                       SHUFFLE [RS_200]
                         PartitionCols:_col2
                         Merge Join Operator [MERGEJOIN_345] (rows=696954748 width=88)
                           Conds:RS_197._col1=RS_198._col0(Inner),Output:["_col2"]
-                        <-Map 64 [SIMPLE_EDGE]
+                        <-Map 36 [SIMPLE_EDGE]
                           SHUFFLE [RS_198]
                             PartitionCols:_col0
                             Select Operator [SEL_190] (rows=3600 width=107)
                               Output:["_col0"]
                               Filter Operator [FIL_321] (rows=3600 width=107)
                                 predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
-                                TableScan [TS_188] (rows=7200 width=107)
-                                  default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                        <-Reducer 59 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 31 [SIMPLE_EDGE]
                           SHUFFLE [RS_197]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_344] (rows=633595212 width=88)
                               Conds:RS_194._col0=RS_195._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 58 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_194]
                                 PartitionCols:_col0
                                 Select Operator [SEL_184] (rows=575995635 width=88)
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_319] (rows=575995635 width=88)
                                     predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
-                                    TableScan [TS_182] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_time_sk","ss_hdemo_sk","ss_store_sk"]
-                            <-Map 63 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
                               SHUFFLE [RS_195]
                                 PartitionCols:_col0
                                 Select Operator [SEL_187] (rows=14400 width=471)
                                   Output:["_col0"]
                                   Filter Operator [FIL_320] (rows=14400 width=471)
                                     predicate:((t_hour = 9) and (t_minute < 30) and t_time_sk is not null)
-                                    TableScan [TS_185] (rows=86400 width=471)
-                                      default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour","t_minute"]
+                                     Please refer to the previous TableScan [TS_3]
+          <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
+            PARTITION_ONLY_SHUFFLE [RS_208]
+              Group By Operator [GBY_24] (rows=1 width=8)
+                Output:["_col0"],aggregations:["count(VALUE._col0)"]
+              <-Reducer 4 [CUSTOM_SIMPLE_EDGE]
+                PARTITION_ONLY_SHUFFLE [RS_23]
+                  Group By Operator [GBY_22] (rows=1 width=8)
+                    Output:["_col0"],aggregations:["count()"]
+                    Merge Join Operator [MERGEJOIN_325] (rows=766650239 width=88)
+                      Conds:RS_18._col2=RS_19._col0(Inner)
+                    <-Map 37 [SIMPLE_EDGE]
+                      SHUFFLE [RS_19]
+                        PartitionCols:_col0
+                        Select Operator [SEL_11] (rows=852 width=1910)
+                          Output:["_col0"]
+                          Filter Operator [FIL_294] (rows=852 width=1910)
+                            predicate:((s_store_name = 'ese') and s_store_sk is not null)
+                             Please refer to the previous TableScan [TS_9]
+                    <-Reducer 3 [SIMPLE_EDGE]
+                      SHUFFLE [RS_18]
+                        PartitionCols:_col2
+                        Merge Join Operator [MERGEJOIN_324] (rows=696954748 width=88)
+                          Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col2"]
+                        <-Map 36 [SIMPLE_EDGE]
+                          SHUFFLE [RS_16]
+                            PartitionCols:_col0
+                            Select Operator [SEL_8] (rows=3600 width=107)
+                              Output:["_col0"]
+                              Filter Operator [FIL_293] (rows=3600 width=107)
+                                predicate:((((hd_dep_count = 3) and (hd_vehicle_count <= 5)) or ((hd_dep_count = 0) and (hd_vehicle_count <= 2)) or ((hd_dep_count = 1) and (hd_vehicle_count <= 3))) and hd_demo_sk is not null)
+                                 Please refer to the previous TableScan [TS_6]
+                        <-Reducer 2 [SIMPLE_EDGE]
+                          SHUFFLE [RS_15]
+                            PartitionCols:_col1
+                            Merge Join Operator [MERGEJOIN_323] (rows=633595212 width=88)
+                              Conds:RS_12._col0=RS_13._col0(Inner),Output:["_col1","_col2"]
+                            <-Map 1 [SIMPLE_EDGE]
+                              SHUFFLE [RS_12]
+                                PartitionCols:_col0
+                                Select Operator [SEL_2] (rows=575995635 width=88)
+                                  Output:["_col0","_col1","_col2"]
+                                  Filter Operator [FIL_291] (rows=575995635 width=88)
+                                    predicate:(ss_hdemo_sk is not null and ss_sold_time_sk is not null and ss_store_sk is not null)
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 35 [SIMPLE_EDGE]
+                              SHUFFLE [RS_13]
+                                PartitionCols:_col0
+                                Select Operator [SEL_5] (rows=14400 width=471)
+                                  Output:["_col0"]
+                                  Filter Operator [FIL_292] (rows=14400 width=471)
+                                    predicate:((t_hour = 8) and (t_minute >= 30) and t_time_sk is not null)
+                                     Please refer to the previous TableScan [TS_3]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query9.q.out b/ql/src/test/results/clientpositive/perf/query9.q.out
index 3c913e1..4dded91 100644
--- a/ql/src/test/results/clientpositive/perf/query9.q.out
+++ b/ql/src/test/results/clientpositive/perf/query9.q.out
@@ -112,36 +112,36 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 34 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
-Reducer 11 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 36 (CUSTOM_SIMPLE_EDGE)
-Reducer 12 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 38 (CUSTOM_SIMPLE_EDGE)
-Reducer 13 <- Reducer 12 (CUSTOM_SIMPLE_EDGE), Reducer 40 (CUSTOM_SIMPLE_EDGE)
-Reducer 14 <- Reducer 13 (CUSTOM_SIMPLE_EDGE), Reducer 42 (CUSTOM_SIMPLE_EDGE)
-Reducer 15 <- Reducer 14 (CUSTOM_SIMPLE_EDGE), Reducer 44 (CUSTOM_SIMPLE_EDGE)
-Reducer 16 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 46 (CUSTOM_SIMPLE_EDGE)
+Reducer 10 <- Reducer 32 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 21 (CUSTOM_SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 27 (CUSTOM_SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (CUSTOM_SIMPLE_EDGE), Reducer 33 (CUSTOM_SIMPLE_EDGE)
+Reducer 14 <- Reducer 13 (CUSTOM_SIMPLE_EDGE), Reducer 22 (CUSTOM_SIMPLE_EDGE)
+Reducer 15 <- Reducer 14 (CUSTOM_SIMPLE_EDGE), Reducer 28 (CUSTOM_SIMPLE_EDGE)
+Reducer 16 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 34 (CUSTOM_SIMPLE_EDGE)
 Reducer 18 <- Map 17 (CUSTOM_SIMPLE_EDGE)
+Reducer 19 <- Map 17 (CUSTOM_SIMPLE_EDGE)
 Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 18 (CUSTOM_SIMPLE_EDGE)
-Reducer 20 <- Map 19 (CUSTOM_SIMPLE_EDGE)
-Reducer 22 <- Map 21 (CUSTOM_SIMPLE_EDGE)
+Reducer 20 <- Map 17 (CUSTOM_SIMPLE_EDGE)
+Reducer 21 <- Map 17 (CUSTOM_SIMPLE_EDGE)
+Reducer 22 <- Map 17 (CUSTOM_SIMPLE_EDGE)
 Reducer 24 <- Map 23 (CUSTOM_SIMPLE_EDGE)
-Reducer 26 <- Map 25 (CUSTOM_SIMPLE_EDGE)
-Reducer 28 <- Map 27 (CUSTOM_SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 20 (CUSTOM_SIMPLE_EDGE)
+Reducer 25 <- Map 23 (CUSTOM_SIMPLE_EDGE)
+Reducer 26 <- Map 23 (CUSTOM_SIMPLE_EDGE)
+Reducer 27 <- Map 23 (CUSTOM_SIMPLE_EDGE)
+Reducer 28 <- Map 23 (CUSTOM_SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 24 (CUSTOM_SIMPLE_EDGE)
 Reducer 30 <- Map 29 (CUSTOM_SIMPLE_EDGE)
-Reducer 32 <- Map 31 (CUSTOM_SIMPLE_EDGE)
-Reducer 34 <- Map 33 (CUSTOM_SIMPLE_EDGE)
-Reducer 36 <- Map 35 (CUSTOM_SIMPLE_EDGE)
-Reducer 38 <- Map 37 (CUSTOM_SIMPLE_EDGE)
-Reducer 4 <- Reducer 22 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
-Reducer 40 <- Map 39 (CUSTOM_SIMPLE_EDGE)
-Reducer 42 <- Map 41 (CUSTOM_SIMPLE_EDGE)
-Reducer 44 <- Map 43 (CUSTOM_SIMPLE_EDGE)
-Reducer 46 <- Map 45 (CUSTOM_SIMPLE_EDGE)
-Reducer 5 <- Reducer 24 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 6 <- Reducer 26 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-Reducer 7 <- Reducer 28 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-Reducer 8 <- Reducer 30 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-Reducer 9 <- Reducer 32 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+Reducer 31 <- Map 29 (CUSTOM_SIMPLE_EDGE)
+Reducer 32 <- Map 29 (CUSTOM_SIMPLE_EDGE)
+Reducer 33 <- Map 29 (CUSTOM_SIMPLE_EDGE)
+Reducer 34 <- Map 29 (CUSTOM_SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 30 (CUSTOM_SIMPLE_EDGE)
+Reducer 5 <- Reducer 19 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+Reducer 6 <- Reducer 25 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+Reducer 7 <- Reducer 31 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
+Reducer 8 <- Reducer 20 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
+Reducer 9 <- Reducer 26 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -177,11 +177,11 @@ Stage-0
                                 PARTITION_ONLY_SHUFFLE [RS_135]
                                   Merge Join Operator [MERGEJOIN_179] (rows=36 width=1961)
                                     Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9"]
-                                  <-Reducer 34 [CUSTOM_SIMPLE_EDGE]
+                                  <-Reducer 32 [CUSTOM_SIMPLE_EDGE]
                                     PARTITION_ONLY_SHUFFLE [RS_133]
                                       Group By Operator [GBY_64] (rows=1 width=288)
                                         Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                      <-Map 33 [CUSTOM_SIMPLE_EDGE]
+                                      <-Map 29 [CUSTOM_SIMPLE_EDGE]
                                         PARTITION_ONLY_SHUFFLE [RS_63]
                                           Group By Operator [GBY_62] (rows=1 width=288)
                                             Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
@@ -189,17 +189,17 @@ Stage-0
                                               Output:["ss_net_paid_inc_tax"]
                                               Filter Operator [FIL_164] (rows=63999515 width=88)
                                                 predicate:ss_quantity BETWEEN 41 AND 60
-                                                TableScan [TS_59] (rows=575995635 width=88)
+                                                TableScan [TS_17] (rows=575995635 width=88)
                                                   default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_net_paid_inc_tax"]
                                   <-Reducer 9 [CUSTOM_SIMPLE_EDGE]
                                     PARTITION_ONLY_SHUFFLE [RS_132]
                                       Merge Join Operator [MERGEJOIN_178] (rows=36 width=1672)
                                         Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8"]
-                                      <-Reducer 32 [CUSTOM_SIMPLE_EDGE]
+                                      <-Reducer 26 [CUSTOM_SIMPLE_EDGE]
                                         PARTITION_ONLY_SHUFFLE [RS_130]
                                           Group By Operator [GBY_57] (rows=1 width=288)
                                             Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                          <-Map 31 [CUSTOM_SIMPLE_EDGE]
+                                          <-Map 23 [CUSTOM_SIMPLE_EDGE]
                                             PARTITION_ONLY_SHUFFLE [RS_56]
                                               Group By Operator [GBY_55] (rows=1 width=288)
                                                 Output:["_col0"],aggregations:["avg(ss_ext_list_price)"]
@@ -207,34 +207,34 @@ Stage-0
                                                   Output:["ss_ext_list_price"]
                                                   Filter Operator [FIL_163] (rows=63999515 width=88)
                                                     predicate:ss_quantity BETWEEN 41 AND 60
-                                                    TableScan [TS_52] (rows=575995635 width=88)
+                                                    TableScan [TS_10] (rows=575995635 width=88)
                                                       default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_ext_list_price"]
                                       <-Reducer 8 [CUSTOM_SIMPLE_EDGE]
                                         PARTITION_ONLY_SHUFFLE [RS_129]
                                           Merge Join Operator [MERGEJOIN_177] (rows=36 width=1383)
                                             Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
-                                          <-Reducer 30 [CUSTOM_SIMPLE_EDGE]
+                                          <-Reducer 20 [CUSTOM_SIMPLE_EDGE]
                                             PARTITION_ONLY_SHUFFLE [RS_127]
                                               Group By Operator [GBY_50] (rows=1 width=8)
                                                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                              <-Map 29 [CUSTOM_SIMPLE_EDGE]
+                                              <-Map 17 [CUSTOM_SIMPLE_EDGE]
                                                 PARTITION_ONLY_SHUFFLE [RS_49]
                                                   Group By Operator [GBY_48] (rows=1 width=8)
                                                     Output:["_col0"],aggregations:["count()"]
                                                     Select Operator [SEL_47] (rows=63999515 width=88)
                                                       Filter Operator [FIL_162] (rows=63999515 width=88)
                                                         predicate:ss_quantity BETWEEN 41 AND 60
-                                                        TableScan [TS_45] (rows=575995635 width=88)
+                                                        TableScan [TS_3] (rows=575995635 width=88)
                                                           default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity"]
                                           <-Reducer 7 [CUSTOM_SIMPLE_EDGE]
                                             PARTITION_ONLY_SHUFFLE [RS_126]
                                               Merge Join Operator [MERGEJOIN_176] (rows=36 width=1374)
                                                 Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4","_col5","_col6"]
-                                              <-Reducer 28 [CUSTOM_SIMPLE_EDGE]
+                                              <-Reducer 31 [CUSTOM_SIMPLE_EDGE]
                                                 PARTITION_ONLY_SHUFFLE [RS_124]
                                                   Group By Operator [GBY_43] (rows=1 width=288)
                                                     Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                                  <-Map 27 [CUSTOM_SIMPLE_EDGE]
+                                                  <-Map 29 [CUSTOM_SIMPLE_EDGE]
                                                     PARTITION_ONLY_SHUFFLE [RS_42]
                                                       Group By Operator [GBY_41] (rows=1 width=288)
                                                         Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
@@ -242,17 +242,16 @@ Stage-0
                                                           Output:["ss_net_paid_inc_tax"]
                                                           Filter Operator [FIL_161] (rows=63999515 width=88)
                                                             predicate:ss_quantity BETWEEN 21 AND 40
-                                                            TableScan [TS_38] (rows=575995635 width=88)
-                                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_net_paid_inc_tax"]
+                                                             Please refer to the previous TableScan [TS_17]
                                               <-Reducer 6 [CUSTOM_SIMPLE_EDGE]
                                                 PARTITION_ONLY_SHUFFLE [RS_123]
                                                   Merge Join Operator [MERGEJOIN_175] (rows=36 width=1085)
                                                     Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4","_col5"]
-                                                  <-Reducer 26 [CUSTOM_SIMPLE_EDGE]
+                                                  <-Reducer 25 [CUSTOM_SIMPLE_EDGE]
                                                     PARTITION_ONLY_SHUFFLE [RS_121]
                                                       Group By Operator [GBY_36] (rows=1 width=288)
                                                         Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                                      <-Map 25 [CUSTOM_SIMPLE_EDGE]
+                                                      <-Map 23 [CUSTOM_SIMPLE_EDGE]
                                                         PARTITION_ONLY_SHUFFLE [RS_35]
                                                           Group By Operator [GBY_34] (rows=1 width=288)
                                                             Output:["_col0"],aggregations:["avg(ss_ext_list_price)"]
@@ -260,43 +259,27 @@ Stage-0
                                                               Output:["ss_ext_list_price"]
                                                               Filter Operator [FIL_160] (rows=63999515 width=88)
                                                                 predicate:ss_quantity BETWEEN 21 AND 40
-                                                                TableScan [TS_31] (rows=575995635 width=88)
-                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_ext_list_price"]
+                                                                 Please refer to the previous TableScan [TS_10]
                                                   <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
                                                     PARTITION_ONLY_SHUFFLE [RS_120]
                                                       Merge Join Operator [MERGEJOIN_174] (rows=36 width=796)
                                                         Conds:(Left Outer),Output:["_col1","_col2","_col3","_col4"]
-                                                      <-Reducer 24 [CUSTOM_SIMPLE_EDGE]
+                                                      <-Reducer 19 [CUSTOM_SIMPLE_EDGE]
                                                         PARTITION_ONLY_SHUFFLE [RS_118]
                                                           Group By Operator [GBY_29] (rows=1 width=8)
                                                             Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                                          <-Map 23 [CUSTOM_SIMPLE_EDGE]
+                                                          <-Map 17 [CUSTOM_SIMPLE_EDGE]
                                                             PARTITION_ONLY_SHUFFLE [RS_28]
                                                               Group By Operator [GBY_27] (rows=1 width=8)
                                                                 Output:["_col0"],aggregations:["count()"]
                                                                 Select Operator [SEL_26] (rows=63999515 width=88)
                                                                   Filter Operator [FIL_159] (rows=63999515 width=88)
                                                                     predicate:ss_quantity BETWEEN 21 AND 40
-                                                                    TableScan [TS_24] (rows=575995635 width=88)
-                                                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity"]
+                                                                     Please refer to the previous TableScan [TS_3]
                                                       <-Reducer 4 [CUSTOM_SIMPLE_EDGE]
                                                         PARTITION_ONLY_SHUFFLE [RS_117]
                                                           Merge Join Operator [MERGEJOIN_173] (rows=36 width=787)
                                                             Conds:(Left Outer),Output:["_col1","_col2","_col3"]
-                                                          <-Reducer 22 [CUSTOM_SIMPLE_EDGE]
-                                                            PARTITION_ONLY_SHUFFLE [RS_115]
-                                                              Group By Operator [GBY_22] (rows=1 width=288)
-                                                                Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                                              <-Map 21 [CUSTOM_SIMPLE_EDGE]
-                                                                PARTITION_ONLY_SHUFFLE [RS_21]
-                                                                  Group By Operator [GBY_20] (rows=1 width=288)
-                                                                    Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
-                                                                    Select Operator [SEL_19] (rows=63999515 width=88)
-                                                                      Output:["ss_net_paid_inc_tax"]
-                                                                      Filter Operator [FIL_158] (rows=63999515 width=88)
-                                                                        predicate:ss_quantity BETWEEN 1 AND 20
-                                                                        TableScan [TS_17] (rows=575995635 width=88)
-                                                                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_net_paid_inc_tax"]
                                                           <-Reducer 3 [CUSTOM_SIMPLE_EDGE]
                                                             PARTITION_ONLY_SHUFFLE [RS_114]
                                                               Merge Join Operator [MERGEJOIN_172] (rows=36 width=498)
@@ -323,13 +306,12 @@ Stage-0
                                                                             Select Operator [SEL_5] (rows=63999515 width=88)
                                                                               Filter Operator [FIL_156] (rows=63999515 width=88)
                                                                                 predicate:ss_quantity BETWEEN 1 AND 20
-                                                                                TableScan [TS_3] (rows=575995635 width=88)
-                                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity"]
-                                                              <-Reducer 20 [CUSTOM_SIMPLE_EDGE]
+                                                                                 Please refer to the previous TableScan [TS_3]
+                                                              <-Reducer 24 [CUSTOM_SIMPLE_EDGE]
                                                                 PARTITION_ONLY_SHUFFLE [RS_112]
                                                                   Group By Operator [GBY_15] (rows=1 width=288)
                                                                     Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                                                  <-Map 19 [CUSTOM_SIMPLE_EDGE]
+                                                                  <-Map 23 [CUSTOM_SIMPLE_EDGE]
                                                                     PARTITION_ONLY_SHUFFLE [RS_14]
                                                                       Group By Operator [GBY_13] (rows=1 width=288)
                                                                         Output:["_col0"],aggregations:["avg(ss_ext_list_price)"]
@@ -337,26 +319,37 @@ Stage-0
                                                                           Output:["ss_ext_list_price"]
                                                                           Filter Operator [FIL_157] (rows=63999515 width=88)
                                                                             predicate:ss_quantity BETWEEN 1 AND 20
-                                                                            TableScan [TS_10] (rows=575995635 width=88)
-                                                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_ext_list_price"]
-                              <-Reducer 36 [CUSTOM_SIMPLE_EDGE]
+                                                                             Please refer to the previous TableScan [TS_10]
+                                                          <-Reducer 30 [CUSTOM_SIMPLE_EDGE]
+                                                            PARTITION_ONLY_SHUFFLE [RS_115]
+                                                              Group By Operator [GBY_22] (rows=1 width=288)
+                                                                Output:["_col0"],aggregations:["avg(VALUE._col0)"]
+                                                              <-Map 29 [CUSTOM_SIMPLE_EDGE]
+                                                                PARTITION_ONLY_SHUFFLE [RS_21]
+                                                                  Group By Operator [GBY_20] (rows=1 width=288)
+                                                                    Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
+                                                                    Select Operator [SEL_19] (rows=63999515 width=88)
+                                                                      Output:["ss_net_paid_inc_tax"]
+                                                                      Filter Operator [FIL_158] (rows=63999515 width=88)
+                                                                        predicate:ss_quantity BETWEEN 1 AND 20
+                                                                         Please refer to the previous TableScan [TS_17]
+                              <-Reducer 21 [CUSTOM_SIMPLE_EDGE]
                                 PARTITION_ONLY_SHUFFLE [RS_136]
                                   Group By Operator [GBY_71] (rows=1 width=8)
                                     Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                  <-Map 35 [CUSTOM_SIMPLE_EDGE]
+                                  <-Map 17 [CUSTOM_SIMPLE_EDGE]
                                     PARTITION_ONLY_SHUFFLE [RS_70]
                                       Group By Operator [GBY_69] (rows=1 width=8)
                                         Output:["_col0"],aggregations:["count()"]
                                         Select Operator [SEL_68] (rows=63999515 width=88)
                                           Filter Operator [FIL_165] (rows=63999515 width=88)
                                             predicate:ss_quantity BETWEEN 61 AND 80
-                                            TableScan [TS_66] (rows=575995635 width=88)
-                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity"]
-                          <-Reducer 38 [CUSTOM_SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_3]
+                          <-Reducer 27 [CUSTOM_SIMPLE_EDGE]
                             PARTITION_ONLY_SHUFFLE [RS_139]
                               Group By Operator [GBY_78] (rows=1 width=288)
                                 Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                              <-Map 37 [CUSTOM_SIMPLE_EDGE]
+                              <-Map 23 [CUSTOM_SIMPLE_EDGE]
                                 PARTITION_ONLY_SHUFFLE [RS_77]
                                   Group By Operator [GBY_76] (rows=1 width=288)
                                     Output:["_col0"],aggregations:["avg(ss_ext_list_price)"]
@@ -364,13 +357,12 @@ Stage-0
                                       Output:["ss_ext_list_price"]
                                       Filter Operator [FIL_166] (rows=63999515 width=88)
                                         predicate:ss_quantity BETWEEN 61 AND 80
-                                        TableScan [TS_73] (rows=575995635 width=88)
-                                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_ext_list_price"]
-                      <-Reducer 40 [CUSTOM_SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_10]
+                      <-Reducer 33 [CUSTOM_SIMPLE_EDGE]
                         PARTITION_ONLY_SHUFFLE [RS_142]
                           Group By Operator [GBY_85] (rows=1 width=288)
                             Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                          <-Map 39 [CUSTOM_SIMPLE_EDGE]
+                          <-Map 29 [CUSTOM_SIMPLE_EDGE]
                             PARTITION_ONLY_SHUFFLE [RS_84]
                               Group By Operator [GBY_83] (rows=1 width=288)
                                 Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
@@ -378,26 +370,24 @@ Stage-0
                                   Output:["ss_net_paid_inc_tax"]
                                   Filter Operator [FIL_167] (rows=63999515 width=88)
                                     predicate:ss_quantity BETWEEN 61 AND 80
-                                    TableScan [TS_80] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_net_paid_inc_tax"]
-                  <-Reducer 42 [CUSTOM_SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_17]
+                  <-Reducer 22 [CUSTOM_SIMPLE_EDGE]
                     PARTITION_ONLY_SHUFFLE [RS_145]
                       Group By Operator [GBY_92] (rows=1 width=8)
                         Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                      <-Map 41 [CUSTOM_SIMPLE_EDGE]
+                      <-Map 17 [CUSTOM_SIMPLE_EDGE]
                         PARTITION_ONLY_SHUFFLE [RS_91]
                           Group By Operator [GBY_90] (rows=1 width=8)
                             Output:["_col0"],aggregations:["count()"]
                             Select Operator [SEL_89] (rows=63999515 width=88)
                               Filter Operator [FIL_168] (rows=63999515 width=88)
                                 predicate:ss_quantity BETWEEN 81 AND 100
-                                TableScan [TS_87] (rows=575995635 width=88)
-                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity"]
-              <-Reducer 44 [CUSTOM_SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_3]
+              <-Reducer 28 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_148]
                   Group By Operator [GBY_99] (rows=1 width=288)
                     Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                  <-Map 43 [CUSTOM_SIMPLE_EDGE]
+                  <-Map 23 [CUSTOM_SIMPLE_EDGE]
                     PARTITION_ONLY_SHUFFLE [RS_98]
                       Group By Operator [GBY_97] (rows=1 width=288)
                         Output:["_col0"],aggregations:["avg(ss_ext_list_price)"]
@@ -405,13 +395,12 @@ Stage-0
                           Output:["ss_ext_list_price"]
                           Filter Operator [FIL_169] (rows=63999515 width=88)
                             predicate:ss_quantity BETWEEN 81 AND 100
-                            TableScan [TS_94] (rows=575995635 width=88)
-                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_ext_list_price"]
-          <-Reducer 46 [CUSTOM_SIMPLE_EDGE]
+                             Please refer to the previous TableScan [TS_10]
+          <-Reducer 34 [CUSTOM_SIMPLE_EDGE]
             PARTITION_ONLY_SHUFFLE [RS_151]
               Group By Operator [GBY_106] (rows=1 width=288)
                 Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-              <-Map 45 [CUSTOM_SIMPLE_EDGE]
+              <-Map 29 [CUSTOM_SIMPLE_EDGE]
                 PARTITION_ONLY_SHUFFLE [RS_105]
                   Group By Operator [GBY_104] (rows=1 width=288)
                     Output:["_col0"],aggregations:["avg(ss_net_paid_inc_tax)"]
@@ -419,6 +408,5 @@ Stage-0
                       Output:["ss_net_paid_inc_tax"]
                       Filter Operator [FIL_170] (rows=63999515 width=88)
                         predicate:ss_quantity BETWEEN 81 AND 100
-                        TableScan [TS_101] (rows=575995635 width=88)
-                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_net_paid_inc_tax"]
+                         Please refer to the previous TableScan [TS_17]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query90.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query90.q.out b/ql/src/test/results/clientpositive/perf/query90.q.out
index 5ae9fe5..aae0aec 100644
--- a/ql/src/test/results/clientpositive/perf/query90.q.out
+++ b/ql/src/test/results/clientpositive/perf/query90.q.out
@@ -6,16 +6,16 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 16 (SIMPLE_EDGE)
-Reducer 13 <- Map 17 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
-Reducer 14 <- Map 18 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Reducer 14 (CUSTOM_SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
-Reducer 3 <- Map 9 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (CUSTOM_SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 3 <- Map 13 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 6 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+Reducer 6 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 9 <- Map 13 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -33,61 +33,61 @@ Stage-0
                 Output:["_col0"]
                 Merge Join Operator [MERGEJOIN_92] (rows=1 width=17)
                   Conds:(Inner),Output:["_col0","_col1"]
-                <-Reducer 15 [CUSTOM_SIMPLE_EDGE]
+                <-Reducer 11 [CUSTOM_SIMPLE_EDGE]
                   PARTITION_ONLY_SHUFFLE [RS_53]
                     Group By Operator [GBY_50] (rows=1 width=8)
                       Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                    <-Reducer 14 [CUSTOM_SIMPLE_EDGE]
+                    <-Reducer 10 [CUSTOM_SIMPLE_EDGE]
                       PARTITION_ONLY_SHUFFLE [RS_49]
                         Group By Operator [GBY_48] (rows=1 width=8)
                           Output:["_col0"],aggregations:["count()"]
                           Merge Join Operator [MERGEJOIN_91] (rows=191667562 width=135)
                             Conds:RS_44._col1=RS_45._col0(Inner)
-                          <-Map 18 [SIMPLE_EDGE]
+                          <-Map 14 [SIMPLE_EDGE]
                             SHUFFLE [RS_45]
                               PartitionCols:_col0
                               Select Operator [SEL_37] (rows=3600 width=107)
                                 Output:["_col0"]
                                 Filter Operator [FIL_85] (rows=3600 width=107)
                                   predicate:((hd_dep_count = 8) and hd_demo_sk is not null)
-                                  TableScan [TS_35] (rows=7200 width=107)
+                                  TableScan [TS_9] (rows=7200 width=107)
                                     default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count"]
-                          <-Reducer 13 [SIMPLE_EDGE]
+                          <-Reducer 9 [SIMPLE_EDGE]
                             SHUFFLE [RS_44]
                               PartitionCols:_col1
                               Merge Join Operator [MERGEJOIN_90] (rows=174243235 width=135)
                                 Conds:RS_41._col0=RS_42._col0(Inner),Output:["_col1"]
-                              <-Map 17 [SIMPLE_EDGE]
+                              <-Map 13 [SIMPLE_EDGE]
                                 SHUFFLE [RS_42]
                                   PartitionCols:_col0
                                   Select Operator [SEL_34] (rows=9600 width=471)
                                     Output:["_col0"]
                                     Filter Operator [FIL_84] (rows=9600 width=471)
                                       predicate:(t_hour BETWEEN 14 AND 15 and t_time_sk is not null)
-                                      TableScan [TS_32] (rows=86400 width=471)
+                                      TableScan [TS_6] (rows=86400 width=471)
                                         default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour"]
-                              <-Reducer 12 [SIMPLE_EDGE]
+                              <-Reducer 8 [SIMPLE_EDGE]
                                 SHUFFLE [RS_41]
                                   PartitionCols:_col0
                                   Merge Join Operator [MERGEJOIN_89] (rows=158402938 width=135)
                                     Conds:RS_38._col2=RS_39._col0(Inner),Output:["_col0","_col1"]
-                                  <-Map 11 [SIMPLE_EDGE]
+                                  <-Map 1 [SIMPLE_EDGE]
                                     SHUFFLE [RS_38]
                                       PartitionCols:_col2
                                       Select Operator [SEL_28] (rows=144002668 width=135)
                                         Output:["_col0","_col1","_col2"]
                                         Filter Operator [FIL_82] (rows=144002668 width=135)
                                           predicate:(ws_ship_hdemo_sk is not null and ws_sold_time_sk is not null and ws_web_page_sk is not null)
-                                          TableScan [TS_26] (rows=144002668 width=135)
+                                          TableScan [TS_0] (rows=144002668 width=135)
                                             default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_time_sk","ws_ship_hdemo_sk","ws_web_page_sk"]
-                                  <-Map 16 [SIMPLE_EDGE]
+                                  <-Map 12 [SIMPLE_EDGE]
                                     SHUFFLE [RS_39]
                                       PartitionCols:_col0
                                       Select Operator [SEL_31] (rows=511 width=585)
                                         Output:["_col0"]
                                         Filter Operator [FIL_83] (rows=511 width=585)
                                           predicate:(wp_char_count BETWEEN 5000 AND 5200 and wp_web_page_sk is not null)
-                                          TableScan [TS_29] (rows=4602 width=585)
+                                          TableScan [TS_3] (rows=4602 width=585)
                                             default@web_page,web_page,Tbl:COMPLETE,Col:NONE,Output:["wp_web_page_sk","wp_char_count"]
                 <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
                   PARTITION_ONLY_SHUFFLE [RS_52]
@@ -99,29 +99,27 @@ Stage-0
                           Output:["_col0"],aggregations:["count()"]
                           Merge Join Operator [MERGEJOIN_88] (rows=191667562 width=135)
                             Conds:RS_18._col1=RS_19._col0(Inner)
-                          <-Map 10 [SIMPLE_EDGE]
+                          <-Map 14 [SIMPLE_EDGE]
                             SHUFFLE [RS_19]
                               PartitionCols:_col0
                               Select Operator [SEL_11] (rows=3600 width=107)
                                 Output:["_col0"]
                                 Filter Operator [FIL_81] (rows=3600 width=107)
                                   predicate:((hd_dep_count = 8) and hd_demo_sk is not null)
-                                  TableScan [TS_9] (rows=7200 width=107)
-                                    default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count"]
+                                   Please refer to the previous TableScan [TS_9]
                           <-Reducer 3 [SIMPLE_EDGE]
                             SHUFFLE [RS_18]
                               PartitionCols:_col1
                               Merge Join Operator [MERGEJOIN_87] (rows=174243235 width=135)
                                 Conds:RS_15._col0=RS_16._col0(Inner),Output:["_col1"]
-                              <-Map 9 [SIMPLE_EDGE]
+                              <-Map 13 [SIMPLE_EDGE]
                                 SHUFFLE [RS_16]
                                   PartitionCols:_col0
                                   Select Operator [SEL_8] (rows=9600 width=471)
                                     Output:["_col0"]
                                     Filter Operator [FIL_80] (rows=9600 width=471)
                                       predicate:(t_hour BETWEEN 6 AND 7 and t_time_sk is not null)
-                                      TableScan [TS_6] (rows=86400 width=471)
-                                        default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_hour"]
+                                       Please refer to the previous TableScan [TS_6]
                               <-Reducer 2 [SIMPLE_EDGE]
                                 SHUFFLE [RS_15]
                                   PartitionCols:_col0
@@ -134,15 +132,13 @@ Stage-0
                                         Output:["_col0","_col1","_col2"]
                                         Filter Operator [FIL_78] (rows=144002668 width=135)
                                           predicate:(ws_ship_hdemo_sk is not null and ws_sold_time_sk is not null and ws_web_page_sk is not null)
-                                          TableScan [TS_0] (rows=144002668 width=135)
-                                            default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_time_sk","ws_ship_hdemo_sk","ws_web_page_sk"]
-                                  <-Map 8 [SIMPLE_EDGE]
+                                           Please refer to the previous TableScan [TS_0]
+                                  <-Map 12 [SIMPLE_EDGE]
                                     SHUFFLE [RS_13]
                                       PartitionCols:_col0
                                       Select Operator [SEL_5] (rows=511 width=585)
                                         Output:["_col0"]
                                         Filter Operator [FIL_79] (rows=511 width=585)
                                           predicate:(wp_char_count BETWEEN 5000 AND 5200 and wp_web_page_sk is not null)
-                                          TableScan [TS_3] (rows=4602 width=585)
-                                            default@web_page,web_page,Tbl:COMPLETE,Col:NONE,Output:["wp_web_page_sk","wp_char_count"]
+                                           Please refer to the previous TableScan [TS_3]
 


[27/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
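The golden-file updates below illustrate the effect of shared scans: where a plan previously contained several Map vertices each scanning the same table, Tez now reuses a single Map vertex for the equivalent scans, and the user-level explain output points back to the first scan with "Please refer to the previous TableScan [TS_n]" instead of repeating it. As a rough sketch of the kind of query whose plan changes this way (a hypothetical self-join for illustration only, not taken from the actual .q test files):

    -- Hypothetical example; table and column names are illustrative, not from the tests.
    -- Without shared scans the plan holds two Map vertices, each scanning src;
    -- with HIVE-16602 both join inputs are expected to be fed from one shared Map 1 scan.
    SELECT x.key, count(*)
    FROM src x
    JOIN src y ON x.key = y.key
    GROUP BY x.key;

In the diffs that follow, this shows up as duplicate Map vertices (e.g. a second scan of the same table under a different alias) being dropped, their operator branches folded under Map 1, and downstream Reducer numbering shifting accordingly.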
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/correlationoptimizer3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/correlationoptimizer3.q.out b/ql/src/test/results/clientpositive/llap/correlationoptimizer3.q.out
index d3cfce8..3e71546 100644
--- a/ql/src/test/results/clientpositive/llap/correlationoptimizer3.q.out
+++ b/ql/src/test/results/clientpositive/llap/correlationoptimizer3.q.out
@@ -21,11 +21,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -45,9 +45,21 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -67,26 +79,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -165,7 +158,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -192,7 +185,7 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 14 Data size: 1316 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: bigint)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -256,11 +249,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -280,9 +273,21 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -302,26 +307,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -400,7 +386,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -427,7 +413,7 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 14 Data size: 1316 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: bigint)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -706,11 +692,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -730,9 +716,21 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -752,26 +750,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -850,7 +829,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -877,7 +856,7 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 14 Data size: 1316 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: bigint)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -941,11 +920,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -965,9 +944,21 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -987,26 +978,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -1085,7 +1057,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1112,7 +1084,7 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 14 Data size: 1316 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: bigint)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/correlationoptimizer6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/correlationoptimizer6.q.out b/ql/src/test/results/clientpositive/llap/correlationoptimizer6.q.out
index 4fec286..82dae9a 100644
--- a/ql/src/test/results/clientpositive/llap/correlationoptimizer6.q.out
+++ b/ql/src/test/results/clientpositive/llap/correlationoptimizer6.q.out
@@ -1793,9 +1793,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1815,47 +1815,40 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: xx
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 5 
             Map Operator Tree:
                 TableScan
-                  alias: x
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  alias: xx
+                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -1898,7 +1891,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1921,7 +1914,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 6 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2025,9 +2018,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2047,47 +2040,40 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: xx
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 5 
             Map Operator Tree:
                 TableScan
-                  alias: x
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  alias: xx
+                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -2130,7 +2116,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -2153,7 +2139,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 6 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2259,7 +2245,7 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 6 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2279,13 +2265,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2300,10 +2279,10 @@ STAGE PLANS:
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
+        Map 5 
             Map Operator Tree:
                 TableScan
-                  alias: zz
+                  alias: y
                   Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
@@ -2319,7 +2298,7 @@ STAGE PLANS:
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: xx
@@ -2491,7 +2470,7 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 6 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2511,13 +2490,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2532,10 +2504,10 @@ STAGE PLANS:
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
+        Map 5 
             Map Operator Tree:
                 TableScan
-                  alias: zz
+                  alias: y
                   Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
@@ -2551,7 +2523,7 @@ STAGE PLANS:
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: xx

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
index 35dde96..91edcdc 100644
--- a/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynamic_partition_pruning.q.out
@@ -2228,9 +2228,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2239,31 +2239,27 @@ STAGE PLANS:
                   alias: srcpart
                   filterExpr: (ds = '2008-04-08') (type: boolean)
                   Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
+                  Filter Operator
+                    predicate: (ds = '2008-04-08') (type: boolean)
+                    Select Operator
                       Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  filterExpr: (ds = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: '2008-04-08' (type: string)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
+                        sort order: 
+                        Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (ds = '2008-04-08') (type: boolean)
+                    Select Operator
+                      Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        keys: '2008-04-08' (type: string)
+                        mode: hash
+                        outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 2 
@@ -2300,7 +2296,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5666,10 +5662,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE), Union 4 (CONTAINS)
-        Reducer 5 <- Map 1 (BROADCAST_EDGE), Union 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE), Union 4 (CONTAINS)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
+        Reducer 4 <- Map 1 (BROADCAST_EDGE), Union 3 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
+        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5678,22 +5674,18 @@ STAGE PLANS:
                   alias: srcpart
                   filterExpr: ds is not null (type: boolean)
                   Statistics: Num rows: 2000 Data size: 389248 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: ds (type: string)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: ds is not null (type: boolean)
                     Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
+                    Select Operator
+                      expressions: ds (type: string)
+                      outputColumnNames: _col0
                       Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 2 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  Statistics: Num rows: 2000 Data size: 389248 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ds (type: string)
                     outputColumnNames: ds
@@ -5709,7 +5701,7 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: srcpart
@@ -5729,7 +5721,7 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 3 
+        Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5750,7 +5742,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5778,7 +5770,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5793,7 +5785,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5814,8 +5806,8 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Union 4 
-            Vertex: Union 4
+        Union 3 
+            Vertex: Union 3
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/except_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/except_distinct.q.out b/ql/src/test/results/clientpositive/llap/except_distinct.q.out
index 4c32ebc..e4c2941 100644
--- a/ql/src/test/results/clientpositive/llap/except_distinct.q.out
+++ b/ql/src/test/results/clientpositive/llap/except_distinct.q.out
@@ -368,13 +368,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE), Union 3 (CONTAINS)
-        Reducer 12 <- Map 11 (SIMPLE_EDGE), Union 5 (CONTAINS)
-        Reducer 14 <- Map 13 (SIMPLE_EDGE), Union 7 (CONTAINS)
+        Reducer 10 <- Map 1 (SIMPLE_EDGE), Union 7 (CONTAINS)
+        Reducer 12 <- Map 11 (SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 6 <- Union 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
         Reducer 8 <- Union 7 (SIMPLE_EDGE)
+        Reducer 9 <- Map 1 (SIMPLE_EDGE), Union 5 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -398,13 +398,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 250 Data size: 46500 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -421,13 +414,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 250 Data size: 46500 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 13 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -446,7 +432,7 @@ STAGE PLANS:
                         value expressions: _col2 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 11 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -485,18 +471,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint), (_col2 * _col3) (type: bigint)
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 500 Data size: 97000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 274 Data size: 53156 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: sum(_col2), sum(_col3)
                       keys: _col0 (type: string), _col1 (type: string)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 137 Data size: 26578 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 137 Data size: 26578 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint), _col3 (type: bigint)
         Reducer 12 
             Execution mode: llap
@@ -514,47 +500,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint), (_col2 * _col3) (type: bigint)
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 291 Data size: 56454 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: sum(_col2), sum(_col3)
-                      keys: _col0 (type: string), _col1 (type: string)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 145 Data size: 28130 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string), _col1 (type: string)
-                        sort order: ++
-                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 145 Data size: 28130 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: bigint), _col3 (type: bigint)
-        Reducer 14 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 250 Data size: 46500 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), 1 (type: bigint), _col2 (type: bigint)
-                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint), (_col2 * _col3) (type: bigint)
-                    outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 274 Data size: 53156 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 500 Data size: 97000 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: sum(_col2), sum(_col3)
                       keys: _col0 (type: string), _col1 (type: string)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 137 Data size: 26578 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 137 Data size: 26578 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint), _col3 (type: bigint)
         Reducer 2 
             Execution mode: llap
@@ -692,6 +649,35 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 250 Data size: 46500 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string), 1 (type: bigint), _col2 (type: bigint)
+                  outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 250 Data size: 48500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint), (_col2 * _col3) (type: bigint)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 291 Data size: 56454 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: sum(_col2), sum(_col3)
+                      keys: _col0 (type: string), _col1 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 145 Data size: 28130 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        Statistics: Num rows: 145 Data size: 28130 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: bigint), _col3 (type: bigint)
         Union 3 
             Vertex: Union 3
         Union 5 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 584c3b5..f701cab 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -1755,8 +1755,8 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-Reducer 4 <- Map 3 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 3 <- Map 1 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -1777,7 +1777,7 @@ Stage-0
                   Output:["_col0","_col1"]
                   TableScan [TS_0] (rows=500 width=178)
                     default@src_cbo,b,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-            <-Reducer 4 [SIMPLE_EDGE] llap
+            <-Reducer 3 [SIMPLE_EDGE] llap
               SHUFFLE [RS_15]
                 PartitionCols:_col0
                 Select Operator [SEL_13] (rows=36 width=95)
@@ -1788,15 +1788,14 @@ Stage-0
                       Output:["_col1"]
                       Group By Operator [GBY_7] (rows=41 width=178)
                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                      <-Map 3 [SIMPLE_EDGE] llap
+                      <-Map 1 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_6]
                           PartitionCols:_col0
                           Group By Operator [GBY_5] (rows=41 width=178)
                             Output:["_col0","_col1"],keys:value, key
                             Filter Operator [FIL_21] (rows=83 width=178)
                               predicate:((value = value) and (value > 'val_2'))
-                              TableScan [TS_2] (rows=500 width=178)
-                                default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                               Please refer to the previous TableScan [TS_0]
 
 PREHOOK: query: explain select * 
 from src_cbo b 
@@ -1820,8 +1819,8 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-Reducer 5 <- Map 4 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 4 <- Map 1 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -1849,22 +1848,21 @@ Stage-0
                         Output:["key","value"]
                         TableScan [TS_0] (rows=500 width=178)
                           default@src_cbo,b,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-            <-Reducer 5 [SIMPLE_EDGE] llap
+            <-Reducer 4 [SIMPLE_EDGE] llap
               SHUFFLE [RS_14]
                 PartitionCols:_col0, _col1
                 Select Operator [SEL_12] (rows=20 width=182)
                   Output:["_col0","_col1","_col2"]
                   Group By Operator [GBY_11] (rows=20 width=178)
                     Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                  <-Map 4 [SIMPLE_EDGE] llap
+                  <-Map 1 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_10]
                       PartitionCols:_col0, _col1
                       Group By Operator [GBY_9] (rows=20 width=178)
                         Output:["_col0","_col1"],keys:key, value
                         Filter Operator [FIL_20] (rows=41 width=178)
                           predicate:((value = value) and (key = key) and (value > 'val_12'))
-                          TableScan [TS_6] (rows=500 width=178)
-                            default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                           Please refer to the previous TableScan [TS_0]
 
 PREHOOK: query: create view cv1 as 
 select * 
@@ -2102,9 +2100,9 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 6 <- Map 5 (SIMPLE_EDGE)
-Reducer 8 <- Map 7 (SIMPLE_EDGE)
+Reducer 7 <- Map 5 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -2151,7 +2149,7 @@ Stage-0
                                 predicate:(key > '8')
                                 TableScan [TS_3] (rows=500 width=87)
                                   default@src_cbo,src_cbo,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
-        <-Reducer 8 [SIMPLE_EDGE] llap
+        <-Reducer 7 [SIMPLE_EDGE] llap
           SHUFFLE [RS_30]
             PartitionCols:_col0
             Group By Operator [GBY_28] (rows=34 width=8)
@@ -2164,15 +2162,14 @@ Stage-0
                     Output:["_col1"]
                     Group By Operator [GBY_24] (rows=69 width=95)
                       Output:["_col0","_col1"],aggregations:["count(VALUE._col0)"],keys:KEY._col0
-                    <-Map 7 [SIMPLE_EDGE] llap
+                    <-Map 5 [SIMPLE_EDGE] llap
                       SHUFFLE [RS_23]
                         PartitionCols:_col0
                         Group By Operator [GBY_22] (rows=69 width=95)
                           Output:["_col0","_col1"],aggregations:["count()"],keys:key
                           Filter Operator [FIL_43] (rows=166 width=87)
                             predicate:(key > '9')
-                            TableScan [TS_19] (rows=500 width=87)
-                              default@src_cbo,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
+                             Please refer to the previous TableScan [TS_3]
 
 PREHOOK: query: explain select p_mfgr, p_name, avg(p_size) 
 from part 
@@ -2254,10 +2251,10 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
 Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
-Reducer 8 <- Map 7 (SIMPLE_EDGE)
+Reducer 7 <- Map 5 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -2291,29 +2288,28 @@ Stage-0
                         Group By Operator [GBY_7] (rows=1 width=16)
                           Output:["_col0","_col1"],aggregations:["count(VALUE._col0)","count(VALUE._col1)"]
                         <-Map 5 [CUSTOM_SIMPLE_EDGE] llap
-                          PARTITION_ONLY_SHUFFLE [RS_6]
+                          SHUFFLE [RS_6]
                             Group By Operator [GBY_5] (rows=1 width=16)
                               Output:["_col0","_col1"],aggregations:["count()","count(key)"]
                               Filter Operator [FIL_28] (rows=166 width=87)
                                 predicate:(key > '2')
                                 TableScan [TS_2] (rows=500 width=87)
                                   default@src_cbo,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
-                <-Reducer 8 [SIMPLE_EDGE] llap
+                <-Reducer 7 [SIMPLE_EDGE] llap
                   SHUFFLE [RS_20]
                     PartitionCols:_col0
                     Select Operator [SEL_15] (rows=69 width=91)
                       Output:["_col0","_col1"]
                       Group By Operator [GBY_14] (rows=69 width=87)
                         Output:["_col0"],keys:KEY._col0
-                      <-Map 7 [SIMPLE_EDGE] llap
+                      <-Map 5 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_13]
                           PartitionCols:_col0
                           Group By Operator [GBY_12] (rows=69 width=87)
                             Output:["_col0"],keys:key
                             Filter Operator [FIL_29] (rows=166 width=87)
                               predicate:(key > '2')
-                              TableScan [TS_9] (rows=500 width=87)
-                                default@src_cbo,s1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
+                               Please refer to the previous TableScan [TS_2]
 
 PREHOOK: query: explain select p_mfgr, b.p_name, p_size 
 from part b 
@@ -2334,10 +2330,10 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-Reducer 5 <- Map 4 (SIMPLE_EDGE)
-Reducer 7 <- Map 6 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 4 <- Map 1 (SIMPLE_EDGE)
+Reducer 5 <- Map 1 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -2363,12 +2359,12 @@ Stage-0
                       Output:["_col0","_col1","_col2"]
                       TableScan [TS_0] (rows=26 width=223)
                         default@part,b,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size"]
-                <-Reducer 5 [SIMPLE_EDGE] llap
+                <-Reducer 4 [SIMPLE_EDGE] llap
                   SHUFFLE [RS_19]
                     PartitionCols:_col0
                     Group By Operator [GBY_7] (rows=1 width=114)
                       Output:["_col0","_col1","_col2"],aggregations:["count(VALUE._col0)","count(VALUE._col1)"],keys:KEY._col0
-                    <-Map 4 [SIMPLE_EDGE] llap
+                    <-Map 1 [SIMPLE_EDGE] llap
                       SHUFFLE [RS_6]
                         PartitionCols:_col0
                         Group By Operator [GBY_5] (rows=1 width=114)
@@ -2377,9 +2373,8 @@ Stage-0
                             Output:["p_name","p_mfgr"]
                             Filter Operator [FIL_29] (rows=4 width=223)
                               predicate:((p_size < 10) and (p_mfgr = p_mfgr))
-                              TableScan [TS_2] (rows=26 width=223)
-                                default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size"]
-            <-Reducer 7 [SIMPLE_EDGE] llap
+                               Please refer to the previous TableScan [TS_0]
+            <-Reducer 5 [SIMPLE_EDGE] llap
               SHUFFLE [RS_22]
                 PartitionCols:_col0, _col1
                 Select Operator [SEL_17] (rows=2 width=223)
@@ -2388,7 +2383,7 @@ Stage-0
                     predicate:_col0 is not null
                     Group By Operator [GBY_14] (rows=2 width=219)
                       Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                    <-Map 6 [SIMPLE_EDGE] llap
+                    <-Map 1 [SIMPLE_EDGE] llap
                       SHUFFLE [RS_13]
                         PartitionCols:_col0, _col1
                         Group By Operator [GBY_12] (rows=2 width=219)
@@ -2397,8 +2392,7 @@ Stage-0
                             Output:["p_name","p_mfgr"]
                             Filter Operator [FIL_30] (rows=4 width=223)
                               predicate:((p_size < 10) and (p_mfgr = p_mfgr))
-                              TableScan [TS_9] (rows=26 width=223)
-                                default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size"]
+                               Please refer to the previous TableScan [TS_0]
 
 PREHOOK: query: explain select p_name, p_size 
 from 
@@ -2420,10 +2414,10 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
 Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
-Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+Reducer 7 <- Map 5 (CUSTOM_SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -2466,21 +2460,20 @@ Stage-0
                                   predicate:(p_size < 10)
                                   TableScan [TS_2] (rows=26 width=4)
                                     default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_size"]
-                <-Reducer 8 [SIMPLE_EDGE] llap
+                <-Reducer 7 [SIMPLE_EDGE] llap
                   SHUFFLE [RS_25]
                     PartitionCols:_col0
                     Select Operator [SEL_20] (rows=1 width=12)
                       Output:["_col0","_col1"]
                       Group By Operator [GBY_19] (rows=1 width=8)
                         Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                      <-Map 7 [CUSTOM_SIMPLE_EDGE] llap
+                      <-Map 5 [CUSTOM_SIMPLE_EDGE] llap
                         PARTITION_ONLY_SHUFFLE [RS_18]
                           Group By Operator [GBY_17] (rows=1 width=76)
                             Output:["_col0"],aggregations:["avg(p_size)"]
                             Filter Operator [FIL_35] (rows=8 width=4)
                               predicate:(p_size < 10)
-                              TableScan [TS_14] (rows=26 width=4)
-                                default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_size"]
+                               Please refer to the previous TableScan [TS_2]
 
 PREHOOK: query: explain select b.p_mfgr, min(p_retailprice) 
 from part b 
@@ -2505,13 +2498,13 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 9 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-Reducer 7 <- Map 6 (SIMPLE_EDGE)
-Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+Reducer 6 <- Map 1 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -2529,26 +2522,6 @@ Stage-0
                 predicate:(not CASE WHEN ((_col3 = 0)) THEN (false) WHEN (_col3 is null) THEN (false) WHEN (_col7 is not null) THEN (true) WHEN (_col0 is null) THEN (null) WHEN ((_col4 < _col3)) THEN (true) ELSE (false) END)
                 Merge Join Operator [MERGEJOIN_44] (rows=5 width=126)
                   Conds:RS_30._col0, _col1=RS_31._col0, _col1(Left Outer),Output:["_col0","_col1","_col3","_col4","_col7"]
-                <-Reducer 10 [SIMPLE_EDGE] llap
-                  SHUFFLE [RS_31]
-                    PartitionCols:_col0, _col1
-                    Select Operator [SEL_26] (rows=1 width=110)
-                      Output:["_col0","_col1","_col2"]
-                      Filter Operator [FIL_25] (rows=1 width=110)
-                        predicate:_col0 is not null
-                        Select Operator [SEL_24] (rows=1 width=110)
-                          Output:["_col0","_col1"]
-                          Filter Operator [FIL_41] (rows=1 width=114)
-                            predicate:((_col1 = _col1) and ((_col2 - _col1) > 600.0))
-                            Group By Operator [GBY_22] (rows=5 width=114)
-                              Output:["_col0","_col1","_col2"],aggregations:["min(VALUE._col0)","max(VALUE._col1)"],keys:KEY._col0
-                            <-Map 9 [SIMPLE_EDGE] llap
-                              SHUFFLE [RS_21]
-                                PartitionCols:_col0
-                                Group By Operator [GBY_20] (rows=5 width=114)
-                                  Output:["_col0","_col1","_col2"],aggregations:["min(p_retailprice)","max(p_retailprice)"],keys:p_mfgr
-                                  TableScan [TS_18] (rows=26 width=106)
-                                    default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_mfgr","p_retailprice"]
                 <-Reducer 3 [SIMPLE_EDGE] llap
                   SHUFFLE [RS_30]
                     PartitionCols:_col0, _col1
@@ -2568,12 +2541,12 @@ Stage-0
                                 Output:["p_mfgr","p_retailprice"]
                                 TableScan [TS_0] (rows=26 width=106)
                                   default@part,b,Tbl:COMPLETE,Col:COMPLETE,Output:["p_mfgr","p_retailprice"]
-                    <-Reducer 8 [SIMPLE_EDGE] llap
+                    <-Reducer 7 [SIMPLE_EDGE] llap
                       SHUFFLE [RS_28]
                         PartitionCols:_col0
                         Group By Operator [GBY_16] (rows=1 width=24)
                           Output:["_col0","_col1","_col2"],aggregations:["count(VALUE._col0)","count(VALUE._col1)"],keys:KEY._col0
-                        <-Reducer 7 [SIMPLE_EDGE] llap
+                        <-Reducer 6 [SIMPLE_EDGE] llap
                           SHUFFLE [RS_15]
                             PartitionCols:_col0
                             Group By Operator [GBY_14] (rows=1 width=24)
@@ -2584,15 +2557,33 @@ Stage-0
                                   predicate:((_col1 = _col1) and ((_col2 - _col1) > 600.0))
                                   Group By Operator [GBY_10] (rows=5 width=114)
                                     Output:["_col0","_col1","_col2"],aggregations:["min(VALUE._col0)","max(VALUE._col1)"],keys:KEY._col0
-                                  <-Map 6 [SIMPLE_EDGE] llap
+                                  <-Map 1 [SIMPLE_EDGE] llap
                                     SHUFFLE [RS_9]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_8] (rows=5 width=114)
                                         Output:["_col0","_col1","_col2"],aggregations:["min(p_retailprice)","max(p_retailprice)"],keys:p_mfgr
                                         Select Operator [SEL_7] (rows=26 width=106)
                                           Output:["p_mfgr","p_retailprice"]
-                                          TableScan [TS_6] (rows=26 width=106)
-                                            default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_mfgr","p_retailprice"]
+                                           Please refer to the previous TableScan [TS_0]
+                <-Reducer 8 [SIMPLE_EDGE] llap
+                  SHUFFLE [RS_31]
+                    PartitionCols:_col0, _col1
+                    Select Operator [SEL_26] (rows=1 width=110)
+                      Output:["_col0","_col1","_col2"]
+                      Filter Operator [FIL_25] (rows=1 width=110)
+                        predicate:_col0 is not null
+                        Select Operator [SEL_24] (rows=1 width=110)
+                          Output:["_col0","_col1"]
+                          Filter Operator [FIL_41] (rows=1 width=114)
+                            predicate:((_col1 = _col1) and ((_col2 - _col1) > 600.0))
+                            Group By Operator [GBY_22] (rows=5 width=114)
+                              Output:["_col0","_col1","_col2"],aggregations:["min(VALUE._col0)","max(VALUE._col1)"],keys:KEY._col0
+                            <-Map 1 [SIMPLE_EDGE] llap
+                              SHUFFLE [RS_21]
+                                PartitionCols:_col0
+                                Group By Operator [GBY_20] (rows=5 width=114)
+                                  Output:["_col0","_col1","_col2"],aggregations:["min(p_retailprice)","max(p_retailprice)"],keys:p_mfgr
+                                   Please refer to the previous TableScan [TS_0]
 
 PREHOOK: query: explain select count(c_int) over(), sum(c_float) over(), max(c_int) over(), min(c_int) over(), row_number() over(), rank() over(), dense_rank() over(), percent_rank() over(), lead(c_int, 2, c_int) over(), lag(c_float, 2, c_float) over() from cbo_t1
 PREHOOK: type: QUERY


[13/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query60.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query60.q.out b/ql/src/test/results/clientpositive/perf/query60.q.out
index ad9d08e..bb05a42 100644
--- a/ql/src/test/results/clientpositive/perf/query60.q.out
+++ b/ql/src/test/results/clientpositive/perf/query60.q.out
@@ -153,26 +153,26 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
-Reducer 12 <- Map 14 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
-Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 20 <- Map 19 (SIMPLE_EDGE)
-Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 24 (SIMPLE_EDGE)
-Reducer 23 <- Map 25 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
-Reducer 28 <- Reducer 27 (SIMPLE_EDGE), Reducer 34 (SIMPLE_EDGE)
-Reducer 29 <- Reducer 28 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 31 <- Map 30 (SIMPLE_EDGE)
-Reducer 33 <- Map 32 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
-Reducer 34 <- Map 36 (SIMPLE_EDGE), Reducer 33 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 11 <- Map 1 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 15 <- Map 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 14 (SIMPLE_EDGE)
+Reducer 17 <- Map 14 (SIMPLE_EDGE)
+Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 21 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 20 <- Map 26 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE)
+Reducer 23 <- Map 26 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
+Reducer 24 <- Map 21 (SIMPLE_EDGE), Map 28 (SIMPLE_EDGE)
+Reducer 25 <- Map 26 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
-Reducer 9 <- Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 23 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -189,50 +189,20 @@ Stage-0
               Group By Operator [GBY_117] (rows=335408073 width=108)
                 Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
               <-Union 5 [SIMPLE_EDGE]
-                <-Reducer 18 [CONTAINS]
+                <-Reducer 10 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_72] (rows=191657247 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 17 [SIMPLE_EDGE]
+                      <-Reducer 9 [SIMPLE_EDGE]
                         SHUFFLE [RS_71]
                           PartitionCols:_col0
                           Group By Operator [GBY_70] (rows=383314495 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_184] (rows=383314495 width=135)
                               Conds:RS_66._col0=RS_67._col4(Inner),Output:["_col1","_col8"]
-                            <-Reducer 16 [SIMPLE_EDGE]
-                              SHUFFLE [RS_66]
-                                PartitionCols:_col0
-                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
-                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 15 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_63]
-                                    PartitionCols:_col1
-                                    Select Operator [SEL_39] (rows=462000 width=1436)
-                                      Output:["_col0","_col1"]
-                                      Filter Operator [FIL_164] (rows=462000 width=1436)
-                                        predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_37] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 20 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_64]
-                                    PartitionCols:_col0
-                                    Group By Operator [GBY_45] (rows=115500 width=1436)
-                                      Output:["_col0"],keys:KEY._col0
-                                    <-Map 19 [SIMPLE_EDGE]
-                                      SHUFFLE [RS_44]
-                                        PartitionCols:_col0
-                                        Group By Operator [GBY_43] (rows=231000 width=1436)
-                                          Output:["_col0"],keys:i_item_id
-                                          Select Operator [SEL_42] (rows=231000 width=1436)
-                                            Output:["i_item_id"]
-                                            Filter Operator [FIL_165] (rows=231000 width=1436)
-                                              predicate:((i_category) IN ('Children') and i_item_id is not null)
-                                              TableScan [TS_40] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_category"]
                             <-Reducer 23 [SIMPLE_EDGE]
                               SHUFFLE [RS_67]
                                 PartitionCols:_col4
@@ -240,14 +210,14 @@ Stage-0
                                   Output:["_col4","_col5"]
                                   Merge Join Operator [MERGEJOIN_179] (rows=348467716 width=135)
                                     Conds:RS_59._col1=RS_60._col0(Inner),Output:["_col2","_col3"]
-                                  <-Map 25 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_60]
                                       PartitionCols:_col0
                                       Select Operator [SEL_55] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_168] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_53] (rows=40000000 width=1014)
+                                          TableScan [TS_16] (rows=40000000 width=1014)
                                             default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
                                   <-Reducer 22 [SIMPLE_EDGE]
                                     SHUFFLE [RS_59]
@@ -255,6 +225,15 @@ Stage-0
                                       Merge Join Operator [MERGEJOIN_178] (rows=316788826 width=135)
                                         Conds:RS_56._col0=RS_57._col0(Inner),Output:["_col1","_col2","_col3"]
                                       <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_57]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_52] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_167] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
+                                              TableScan [TS_13] (rows=73049 width=1119)
+                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                      <-Map 27 [SIMPLE_EDGE]
                                         SHUFFLE [RS_56]
                                           PartitionCols:_col0
                                           Select Operator [SEL_49] (rows=287989836 width=135)
@@ -263,49 +242,69 @@ Stage-0
                                               predicate:(cs_sold_date_sk is not null and cs_bill_addr_sk is not null and cs_item_sk is not null)
                                               TableScan [TS_47] (rows=287989836 width=135)
                                                 default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_addr_sk","cs_item_sk","cs_ext_sales_price"]
-                                      <-Map 24 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_57]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_52] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_167] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
-                                              TableScan [TS_50] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                <-Reducer 29 [CONTAINS]
+                            <-Reducer 8 [SIMPLE_EDGE]
+                              SHUFFLE [RS_66]
+                                PartitionCols:_col0
+                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
+                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
+                                <-Map 1 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_63]
+                                    PartitionCols:_col1
+                                    Select Operator [SEL_39] (rows=462000 width=1436)
+                                      Output:["_col0","_col1"]
+                                      Filter Operator [FIL_164] (rows=462000 width=1436)
+                                        predicate:(i_item_id is not null and i_item_sk is not null)
+                                        TableScan [TS_0] (rows=462000 width=1436)
+                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
+                                <-Reducer 16 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_64]
+                                    PartitionCols:_col0
+                                    Group By Operator [GBY_45] (rows=115500 width=1436)
+                                      Output:["_col0"],keys:KEY._col0
+                                    <-Map 14 [SIMPLE_EDGE]
+                                      SHUFFLE [RS_44]
+                                        PartitionCols:_col0
+                                        Group By Operator [GBY_43] (rows=231000 width=1436)
+                                          Output:["_col0"],keys:i_item_id
+                                          Select Operator [SEL_42] (rows=231000 width=1436)
+                                            Output:["i_item_id"]
+                                            Filter Operator [FIL_165] (rows=231000 width=1436)
+                                              predicate:((i_category) IN ('Children') and i_item_id is not null)
+                                              TableScan [TS_3] (rows=462000 width=1436)
+                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_category"]
+                <-Reducer 13 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_111] (rows=95833781 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 28 [SIMPLE_EDGE]
+                      <-Reducer 12 [SIMPLE_EDGE]
                         SHUFFLE [RS_110]
                           PartitionCols:_col0
                           Group By Operator [GBY_109] (rows=191667562 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_185] (rows=191667562 width=135)
                               Conds:RS_105._col0=RS_106._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 27 [SIMPLE_EDGE]
+                            <-Reducer 11 [SIMPLE_EDGE]
                               SHUFFLE [RS_105]
                                 PartitionCols:_col0
                                 Merge Join Operator [MERGEJOIN_180] (rows=508200 width=1436)
                                   Conds:RS_102._col1=RS_103._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 26 [SIMPLE_EDGE]
+                                <-Map 1 [SIMPLE_EDGE]
                                   SHUFFLE [RS_102]
                                     PartitionCols:_col1
                                     Select Operator [SEL_78] (rows=462000 width=1436)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_169] (rows=462000 width=1436)
                                         predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_76] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 31 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 17 [SIMPLE_EDGE]
                                   SHUFFLE [RS_103]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_84] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 30 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_83]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_82] (rows=231000 width=1436)
@@ -314,30 +313,36 @@ Stage-0
                                             Output:["i_item_id"]
                                             Filter Operator [FIL_170] (rows=231000 width=1436)
                                               predicate:((i_category) IN ('Children') and i_item_id is not null)
-                                              TableScan [TS_79] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_category"]
-                            <-Reducer 34 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 25 [SIMPLE_EDGE]
                               SHUFFLE [RS_106]
                                 PartitionCols:_col3
                                 Select Operator [SEL_101] (rows=174243235 width=135)
                                   Output:["_col3","_col5"]
                                   Merge Join Operator [MERGEJOIN_182] (rows=174243235 width=135)
                                     Conds:RS_98._col2=RS_99._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 36 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_99]
                                       PartitionCols:_col0
                                       Select Operator [SEL_94] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_173] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_92] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 33 [SIMPLE_EDGE]
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 24 [SIMPLE_EDGE]
                                     SHUFFLE [RS_98]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_181] (rows=158402938 width=135)
                                         Conds:RS_95._col0=RS_96._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 32 [SIMPLE_EDGE]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_96]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_91] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_172] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 28 [SIMPLE_EDGE]
                                         SHUFFLE [RS_95]
                                           PartitionCols:_col0
                                           Select Operator [SEL_88] (rows=144002668 width=135)
@@ -346,15 +351,6 @@ Stage-0
                                               predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null and ws_item_sk is not null)
                                               TableScan [TS_86] (rows=144002668 width=135)
                                                 default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                      <-Map 35 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_96]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_91] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_172] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
-                                              TableScan [TS_89] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                 <-Reducer 4 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
@@ -369,45 +365,6 @@ Stage-0
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_183] (rows=766650239 width=88)
                               Conds:RS_29._col0=RS_30._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 12 [SIMPLE_EDGE]
-                              SHUFFLE [RS_30]
-                                PartitionCols:_col3
-                                Select Operator [SEL_25] (rows=696954748 width=88)
-                                  Output:["_col3","_col5"]
-                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
-                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 14 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_23]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_18] (rows=20000000 width=1014)
-                                        Output:["_col0"]
-                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
-                                          predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_16] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 11 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_22]
-                                      PartitionCols:_col2
-                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
-                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 10 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_19]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_12] (rows=575995635 width=88)
-                                            Output:["_col0","_col1","_col2","_col3"]
-                                            Filter Operator [FIL_161] (rows=575995635 width=88)
-                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
-                                              TableScan [TS_10] (rows=575995635 width=88)
-                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
-                                      <-Map 13 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_20]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_15] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_162] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
-                                              TableScan [TS_13] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                             <-Reducer 2 [SIMPLE_EDGE]
                               SHUFFLE [RS_29]
                                 PartitionCols:_col0
@@ -420,14 +377,13 @@ Stage-0
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_159] (rows=462000 width=1436)
                                         predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_0] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 9 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 15 [SIMPLE_EDGE]
                                   SHUFFLE [RS_27]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_8] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 8 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_7]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_6] (rows=231000 width=1436)
@@ -436,6 +392,42 @@ Stage-0
                                             Output:["i_item_id"]
                                             Filter Operator [FIL_160] (rows=231000 width=1436)
                                               predicate:((i_category) IN ('Children') and i_item_id is not null)
-                                              TableScan [TS_3] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_category"]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 20 [SIMPLE_EDGE]
+                              SHUFFLE [RS_30]
+                                PartitionCols:_col3
+                                Select Operator [SEL_25] (rows=696954748 width=88)
+                                  Output:["_col3","_col5"]
+                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
+                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
+                                  <-Map 26 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_23]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_18] (rows=20000000 width=1014)
+                                        Output:["_col0"]
+                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
+                                          predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 19 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_22]
+                                      PartitionCols:_col2
+                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
+                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_20]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_15] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_162] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 9) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 18 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_19]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_12] (rows=575995635 width=88)
+                                            Output:["_col0","_col1","_col2","_col3"]
+                                            Filter Operator [FIL_161] (rows=575995635 width=88)
+                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
+                                              TableScan [TS_10] (rows=575995635 width=88)
+                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
 


[28/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/59f65772
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/59f65772
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/59f65772

Branch: refs/heads/hive-14535
Commit: 59f6577296bfcdb5d8e74657f7eb6d6294630b23
Parents: f2fa83c
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Thu May 4 20:27:40 2017 +0100
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Sat May 13 08:09:14 2017 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |    4 +
 .../hive/ql/optimizer/ConvertJoinMapJoin.java   |    9 +-
 .../hive/ql/optimizer/MapJoinProcessor.java     |   11 +-
 .../ql/optimizer/ReduceSinkMapJoinProc.java     |    6 +-
 .../hive/ql/optimizer/SharedScanOptimizer.java  |  625 ++++++
 .../physical/GenMRSkewJoinProcessor.java        |    2 +-
 .../physical/GenSparkSkewJoinProcessor.java     |    2 +-
 .../apache/hadoop/hive/ql/parse/GenTezWork.java |   10 +-
 .../hadoop/hive/ql/parse/TezCompiler.java       |   75 +-
 .../apache/hadoop/hive/ql/plan/JoinDesc.java    |   12 +
 .../apache/hadoop/hive/ql/plan/MapJoinDesc.java |    3 +-
 .../test/queries/clientpositive/perf/query88.q  |    2 +
 .../clientpositive/llap/auto_join0.q.out        |   15 +-
 .../clientpositive/llap/auto_join30.q.out       |   67 +-
 .../llap/auto_sortmerge_join_9.q.out            |   70 +-
 .../llap/bucket_map_join_tez1.q.out             |   11 +-
 .../llap/correlationoptimizer2.q.out            |   26 +-
 .../llap/correlationoptimizer3.q.out            |  188 +-
 .../llap/correlationoptimizer6.q.out            |  104 +-
 .../llap/dynamic_partition_pruning.q.out        |   92 +-
 .../clientpositive/llap/except_distinct.q.out   |   92 +-
 .../clientpositive/llap/explainuser_1.q.out     |  137 +-
 .../clientpositive/llap/explainuser_2.q.out     |  772 ++++---
 .../clientpositive/llap/intersect_merge.q.out   |   83 +-
 .../results/clientpositive/llap/join46.q.out    |   38 +-
 .../llap/limit_join_transpose.q.out             |  242 +--
 .../clientpositive/llap/limit_pushdown.q.out    |   17 +-
 .../clientpositive/llap/llap_nullscan.q.out     |   15 +-
 .../results/clientpositive/llap/mapjoin46.q.out |   43 +-
 .../test/results/clientpositive/llap/mrr.q.out  |   82 +-
 .../clientpositive/llap/multiMapJoin2.q.out     |  209 +-
 .../llap/offset_limit_ppd_optimizer.q.out       |   17 +-
 .../clientpositive/llap/subquery_in.q.out       |  173 +-
 .../clientpositive/llap/subquery_multi.q.out    |  617 +++---
 .../clientpositive/llap/subquery_notin.q.out    | 1539 ++++++-------
 .../clientpositive/llap/subquery_null_agg.q.out |   26 +-
 .../clientpositive/llap/subquery_scalar.q.out   |  359 ++--
 .../clientpositive/llap/subquery_select.q.out   |  957 ++++-----
 .../clientpositive/llap/subquery_views.q.out    |  274 +--
 .../clientpositive/llap/tez_join_tests.q.out    |   29 +-
 .../clientpositive/llap/tez_joins_explain.q.out |   29 +-
 .../clientpositive/llap/unionDistinct_1.q.out   |  261 +--
 .../clientpositive/llap/union_top_level.q.out   |   50 +-
 .../llap/vector_groupby_grouping_sets4.q.out    |   43 +-
 .../llap/vector_groupby_mapjoin.q.out           |   26 +-
 .../clientpositive/llap/vector_join30.q.out     |  122 +-
 .../vectorized_dynamic_partition_pruning.q.out  |  108 +-
 .../results/clientpositive/perf/query1.q.out    |   88 +-
 .../results/clientpositive/perf/query14.q.out   | 2032 +++++++++---------
 .../results/clientpositive/perf/query16.q.out   |   27 +-
 .../results/clientpositive/perf/query17.q.out   |   68 +-
 .../results/clientpositive/perf/query23.q.out   |  477 ++--
 .../results/clientpositive/perf/query25.q.out   |   68 +-
 .../results/clientpositive/perf/query28.q.out   |   87 +-
 .../results/clientpositive/perf/query29.q.out   |   19 +-
 .../results/clientpositive/perf/query30.q.out   |  114 +-
 .../results/clientpositive/perf/query31.q.out   |  270 ++-
 .../results/clientpositive/perf/query32.q.out   |   34 +-
 .../results/clientpositive/perf/query33.q.out   |  252 ++-
 .../results/clientpositive/perf/query38.q.out   |   94 +-
 .../results/clientpositive/perf/query39.q.out   |   58 +-
 .../results/clientpositive/perf/query46.q.out   |   75 +-
 .../results/clientpositive/perf/query5.q.out    |   70 +-
 .../results/clientpositive/perf/query51.q.out   |   83 +-
 .../results/clientpositive/perf/query56.q.out   |  252 ++-
 .../results/clientpositive/perf/query58.q.out   |  120 +-
 .../results/clientpositive/perf/query6.q.out    |   19 +-
 .../results/clientpositive/perf/query60.q.out   |  252 ++-
 .../results/clientpositive/perf/query64.q.out   |  443 ++--
 .../results/clientpositive/perf/query65.q.out   |   86 +-
 .../results/clientpositive/perf/query66.q.out   |   90 +-
 .../results/clientpositive/perf/query68.q.out   |   75 +-
 .../results/clientpositive/perf/query69.q.out   |   76 +-
 .../results/clientpositive/perf/query70.q.out   |   44 +-
 .../results/clientpositive/perf/query75.q.out   |  258 ++-
 .../results/clientpositive/perf/query76.q.out   |  126 +-
 .../results/clientpositive/perf/query80.q.out   |  106 +-
 .../results/clientpositive/perf/query81.q.out   |   57 +-
 .../results/clientpositive/perf/query83.q.out   |  225 +-
 .../results/clientpositive/perf/query85.q.out   |   11 +-
 .../results/clientpositive/perf/query87.q.out   |   94 +-
 .../results/clientpositive/perf/query88.q.out   |  358 ++-
 .../results/clientpositive/perf/query9.q.out    |  170 +-
 .../results/clientpositive/perf/query90.q.out   |   58 +-
 .../results/clientpositive/perf/query92.q.out   |   35 +-
 .../results/clientpositive/perf/query95.q.out   |   70 +-
 .../results/clientpositive/perf/query97.q.out   |   35 +-
 .../clientpositive/tez/explainanalyze_2.q.out   |  412 ++--
 .../clientpositive/tez/explainanalyze_3.q.out   |    8 +-
 .../clientpositive/tez/explainuser_3.q.out      |    8 +-
 90 files changed, 7086 insertions(+), 8012 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 73e0290..d6a80ae 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1645,6 +1645,10 @@ public class HiveConf extends Configuration {
         "If the skew information is correctly stored in the metadata, hive.optimize.skewjoin.compiletime\n" +
         "would change the query plan to take care of it, and hive.optimize.skewjoin will be a no-op."),
 
+    HIVE_SHARED_SCAN_OPTIMIZATION("hive.optimize.shared.scan", true,
+        "Whether to enable shared scan optimizer. The optimizer finds scan operator over the same table\n" +
+        "in the query plan and merges them if they meet some preconditions."),
+
     // CTE
     HIVE_CTE_MATERIALIZE_THRESHOLD("hive.optimize.cte.materialize.threshold", -1,
         "If the number of references to a CTE clause exceeds this threshold, Hive will materialize it\n" +

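For reference, the new flag can be read or toggled through HiveConf like any other boolean ConfVar; the TezCompiler change further down guards the pass on it in exactly this way. A minimal sketch, assuming Hive's jars are on the classpath (this snippet is illustrative and not part of the patch):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class SharedScanFlagExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Reads the default declared above: hive.optimize.shared.scan = true
        boolean enabled = conf.getBoolVar(ConfVars.HIVE_SHARED_SCAN_OPTIMIZATION);
        System.out.println("shared scan optimization enabled: " + enabled);
        // Disable the optimizer on this configuration object
        conf.setBoolVar(ConfVars.HIVE_SHARED_SCAN_OPTIMIZATION, false);
      }
    }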
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
index d0fdb52..0eec78e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
@@ -281,7 +281,8 @@ public class ConvertJoinMapJoin implements NodeProcessor {
                   joinOp.getConf().getBaseSrc(), joinOp).getSecond(),
                   null, joinDesc.getExprs(), null, null,
                   joinDesc.getOutputColumnNames(), mapJoinConversionPos, joinDesc.getConds(),
-                  joinDesc.getFilters(), joinDesc.getNoOuterJoin(), null, joinDesc.getNoConditionalTaskSize());
+                  joinDesc.getFilters(), joinDesc.getNoOuterJoin(), null,
+                  joinDesc.getNoConditionalTaskSize(), joinDesc.getInMemoryDataSize());
       mapJoinDesc.setNullSafes(joinDesc.getNullSafes());
       mapJoinDesc.setFilterMap(joinDesc.getFilterMap());
       mapJoinDesc.setResidualFilterExprs(joinDesc.getResidualFilterExprs());
@@ -419,7 +420,6 @@ public class ConvertJoinMapJoin implements NodeProcessor {
       // each side better have 0 or more RS. if either side is unbalanced, cannot convert.
       // This is a workaround for now. Right fix would be to refactor code in the
       // MapRecordProcessor and ReduceRecordProcessor with respect to the sources.
-      @SuppressWarnings({"rawtypes","unchecked"})
       Set<ReduceSinkOperator> set =
           OperatorUtils.findOperatorsUpstream(parentOp.getParentOperators(),
               ReduceSinkOperator.class);
@@ -719,6 +719,11 @@ public class ConvertJoinMapJoin implements NodeProcessor {
 
     }
 
+    // We store the total memory that this MapJoin is going to use,
+    // which is calculated as totalSize/buckets, with totalSize
+    // equal to the sum of the small-table sizes.
+    joinOp.getConf().setInMemoryDataSize(totalSize/buckets);
+
     return bigTablePosition;
   }
 

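The setInMemoryDataSize call above stores totalSize/buckets on the join descriptor, where totalSize is the accumulated size of the small-table inputs of the map join. A self-contained sketch of that arithmetic; the sizes and bucket count below are invented for illustration and are not taken from the patch:

    public class InMemorySizeSketch {
      // Mirror of the totalSize/buckets computation: sum the small-table sizes
      // and divide by the bucket count to estimate the in-memory data size.
      static long estimateInMemoryDataSize(long[] smallTableSizes, int buckets) {
        long totalSize = 0L;
        for (long size : smallTableSizes) {
          totalSize += size;
        }
        return totalSize / buckets;
      }

      public static void main(String[] args) {
        // Hypothetical example: broadcast inputs of 48 MB and 16 MB over 4 buckets -> 16 MB
        long[] sizes = {48L << 20, 16L << 20};
        System.out.println(estimateInMemoryDataSize(sizes, 4));
      }
    }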
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
index 85d46f3..f01fb9c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
@@ -29,8 +29,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -78,6 +76,8 @@ import org.apache.hadoop.hive.ql.plan.SelectDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implementation of one of the rule-based map join optimization. User passes hints to specify
@@ -432,7 +432,7 @@ public class MapJoinProcessor extends Transform {
         smbJoinDesc.getOutputColumnNames(),
         bigTablePos, smbJoinDesc.getConds(),
         smbJoinDesc.getFilters(), smbJoinDesc.isNoOuterJoin(), smbJoinDesc.getDumpFilePrefix(),
-        smbJoinDesc.getNoConditionalTaskSize());
+        smbJoinDesc.getNoConditionalTaskSize(), smbJoinDesc.getInMemoryDataSize());
 
     mapJoinDesc.setStatistics(smbJoinDesc.getStatistics());
 
@@ -1184,8 +1184,9 @@ public class MapJoinProcessor extends Transform {
     JoinCondDesc[] joinCondns = op.getConf().getConds();
     MapJoinDesc mapJoinDescriptor =
         new MapJoinDesc(keyExprMap, keyTableDesc, newValueExprs, valueTableDescs,
-            valueFilteredTableDescs, outputColumnNames, mapJoinPos, joinCondns, filters, op
-                .getConf().getNoOuterJoin(), dumpFilePrefix, op.getConf().getNoConditionalTaskSize());
+            valueFilteredTableDescs, outputColumnNames, mapJoinPos, joinCondns, filters,
+            op.getConf().getNoOuterJoin(), dumpFilePrefix,
+            op.getConf().getNoConditionalTaskSize(), op.getConf().getInMemoryDataSize());
     mapJoinDescriptor.setStatistics(op.getConf().getStatistics());
     mapJoinDescriptor.setTagOrder(tagOrder);
     mapJoinDescriptor.setNullSafes(desc.getNullSafes());

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
index 3a6baca..ac234d0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java
@@ -219,8 +219,8 @@ public class ReduceSinkMapJoinProc implements NodeProcessor {
     if (tableSize == 0) {
       tableSize = 1;
     }
-    LOG.info("Mapjoin " + mapJoinOp + "(bucket map join = )" + joinConf.isBucketMapJoin()
-    + ", pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount
+    LOG.info("Mapjoin " + mapJoinOp + "(bucket map join = " + joinConf.isBucketMapJoin()
+    + "), pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount
     + " keys estimated from " + rowCount + " rows, " + bucketCount + " buckets)");
     joinConf.getParentToInput().put(pos, parentWork.getName());
     if (keyCount != Long.MAX_VALUE) {
@@ -290,7 +290,7 @@ public class ReduceSinkMapJoinProc implements NodeProcessor {
 
         ReduceSinkOperator r = null;
         if (context.connectedReduceSinks.contains(parentRS)) {
-          LOG.debug("Cloning reduce sink for multi-child broadcast edge");
+          LOG.debug("Cloning reduce sink " + parentRS + " for multi-child broadcast edge");
           // we've already set this one up. Need to clone for the next work.
           r = (ReduceSinkOperator) OperatorFactory.getAndMakeChild(
               parentRS.getCompilationOpContext(),

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
new file mode 100644
index 0000000..d04fc64
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
@@ -0,0 +1,625 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
+import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UnionOperator;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.SemiJoinBranchInfo;
+import org.apache.hadoop.hive.ql.plan.DynamicPruningEventDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Multimap;
+
+/**
+ * Shared scan optimizer. This rule finds scan operators over the same table
+ * in the query plan and merges them if they meet some preconditions.
+ *
+ *  TS   TS             TS
+ *  |    |     ->      /  \
+ *  Op   Op           Op  Op
+ *
+ * <p>Currently it only works with the Tez execution engine.
+ */
+public class SharedScanOptimizer extends Transform {
+
+  private final static Logger LOG = LoggerFactory.getLogger(SharedScanOptimizer.class);
+
+  @Override
+  public ParseContext transform(ParseContext pctx) throws SemanticException {
+
+    final Map<String, TableScanOperator> topOps = pctx.getTopOps();
+    if (topOps.size() < 2) {
+      // Nothing to do, bail out
+      return pctx;
+    }
+
+    // Cache to use during optimization
+    SharedScanOptimizerCache optimizerCache = new SharedScanOptimizerCache();
+
+    // We will not apply this optimization to some table scan operators.
+    Set<TableScanOperator> excludeTableScanOps = gatherNotValidTableScanOps(pctx, optimizerCache);
+    LOG.debug("Exclude TableScan ops: {}", excludeTableScanOps);
+
+    // Map of dbName.TblName -> Pair(tableAlias, TSOperator)
+    Multimap<String, Entry<String, TableScanOperator>> tableNameToOps = splitTableScanOpsByTable(pctx);
+
+    // We enforce a certain order when we do the reutilization.
+    // In particular, we rank the tables by table size x number of reads.
+    List<Entry<String, Long>> sortedTables = rankTablesByAccumulatedSize(pctx, excludeTableScanOps);
+    LOG.debug("Sorted tables by size: {}", sortedTables);
+
+    // Execute optimization
+    Multimap<String, TableScanOperator> existingOps = ArrayListMultimap.create();
+    Set<String> entriesToRemove = new HashSet<>();
+    for (Entry<String, Long> tablePair : sortedTables) {
+      for (Entry<String, TableScanOperator> tableScanOpPair : tableNameToOps.get(tablePair.getKey())) {
+        TableScanOperator tsOp = tableScanOpPair.getValue();
+        if (excludeTableScanOps.contains(tsOp)) {
+          // Skip operator, currently we do not merge
+          continue;
+        }
+        String tableName = tablePair.getKey();
+        Collection<TableScanOperator> prevTsOps = existingOps.get(tableName);
+        if (!prevTsOps.isEmpty()) {
+          for (TableScanOperator prevTsOp : prevTsOps) {
+
+            // First we check if the two table scan operators can actually be merged
+            // If schemas do not match, we currently do not merge
+            List<String> prevTsOpNeededColumns = prevTsOp.getNeededColumns();
+            List<String> tsOpNeededColumns = tsOp.getNeededColumns();
+            if (prevTsOpNeededColumns.size() != tsOpNeededColumns.size()) {
+              // Skip
+              continue;
+            }
+            boolean notEqual = false;
+            for (int i = 0; i < prevTsOpNeededColumns.size(); i++) {
+              if (!prevTsOpNeededColumns.get(i).equals(tsOpNeededColumns.get(i))) {
+                notEqual = true;
+                break;
+              }
+            }
+            if (notEqual) {
+              // Skip
+              continue;
+            }
+            // If row limit does not match, we currently do not merge
+            if (prevTsOp.getConf().getRowLimit() != tsOp.getConf().getRowLimit()) {
+              // Skip
+              continue;
+            }
+
+            // It seems these two operators can be merged.
+            // Check that plan meets some preconditions before doing it.
+            // In particular, in the presence of map joins in the upstream plan:
+            // - we cannot exceed the noconditional task size, and
+            // - if we already merged the big table, we cannot merge the broadcast
+            // tables.
+            if (!validPreConditions(pctx, optimizerCache, prevTsOp, tsOp)) {
+              // Skip
+              LOG.debug("{} does not meet preconditions", tsOp);
+              continue;
+            }
+
+            // We can merge
+            ExprNodeGenericFuncDesc exprNode = null;
+            if (prevTsOp.getConf().getFilterExpr() != null) {
+              // Push filter on top of children
+              pushFilterToTopOfTableScan(optimizerCache, prevTsOp);
+              // Clone to push to table scan
+              exprNode = (ExprNodeGenericFuncDesc) prevTsOp.getConf().getFilterExpr();
+            }
+            if (tsOp.getConf().getFilterExpr() != null) {
+              // Push filter on top
+              pushFilterToTopOfTableScan(optimizerCache, tsOp);
+              ExprNodeGenericFuncDesc tsExprNode = tsOp.getConf().getFilterExpr();
+              if (exprNode != null && !exprNode.isSame(tsExprNode)) {
+                // We merge filters from previous scan by ORing with filters from current scan
+                if (exprNode.getGenericUDF() instanceof GenericUDFOPOr) {
+                  List<ExprNodeDesc> newChildren = new ArrayList<>(exprNode.getChildren().size() + 1);
+                  for (ExprNodeDesc childExprNode : exprNode.getChildren()) {
+                    if (childExprNode.isSame(tsExprNode)) {
+                      // We do not need to do anything, it is in the OR expression
+                      break;
+                    }
+                    newChildren.add(childExprNode);
+                  }
+                  if (exprNode.getChildren().size() == newChildren.size()) {
+                    newChildren.add(tsExprNode);
+                    exprNode = ExprNodeGenericFuncDesc.newInstance(
+                            new GenericUDFOPOr(),
+                            newChildren);
+                  }
+                } else {
+                  exprNode = ExprNodeGenericFuncDesc.newInstance(
+                          new GenericUDFOPOr(),
+                          Arrays.<ExprNodeDesc>asList(exprNode, tsExprNode));
+                }
+              }
+            }
+            // Replace filter
+            prevTsOp.getConf().setFilterExpr(exprNode);
+            // Replace table scan operator
+            List<Operator<? extends OperatorDesc>> allChildren =
+                    Lists.newArrayList(tsOp.getChildOperators());
+            for (Operator<? extends OperatorDesc> op : allChildren) {
+              tsOp.getChildOperators().remove(op);
+              op.replaceParent(tsOp, prevTsOp);
+              prevTsOp.getChildOperators().add(op);
+            }
+            entriesToRemove.add(tableScanOpPair.getKey());
+            // Remove and combine
+            optimizerCache.removeOpAndCombineWork(tsOp, prevTsOp);
+
+            LOG.debug("Merged {} into {}", tsOp, prevTsOp);
+
+            break;
+          }
+          if (!entriesToRemove.contains(tableScanOpPair.getKey())) {
+            existingOps.put(tableName, tsOp);
+          }
+        } else {
+          // Add to existing ops
+          existingOps.put(tableName, tsOp);
+        }
+      }
+    }
+    // Remove unused operators
+    for (String key : entriesToRemove) {
+      topOps.remove(key);
+    }
+
+    return pctx;
+  }
+
+  private static Set<TableScanOperator> gatherNotValidTableScanOps(
+          ParseContext pctx, SharedScanOptimizerCache optimizerCache) {
+    // Find TS operators with partition pruning enabled in plan
+    // because these TS may potentially read different data for
+    // different pipelines.
+    // These can be:
+    // 1) TS with static partitioning.
+    //    TODO: Check partition list of different TS and do not add if they are identical
+    // 2) TS with DPP.
+    //    TODO: Check if dynamic filters are identical and do not add.
+    // 3) TS with semijoin DPP.
+    //    TODO: Check for dynamic filters.
+    Set<TableScanOperator> notValidTableScanOps = new HashSet<>();
+    // 1) TS with static partitioning.
+    Map<String, TableScanOperator> topOps = pctx.getTopOps();
+    for (TableScanOperator tsOp : topOps.values()) {
+      if (tsOp.getConf().getPartColumns() != null &&
+              !tsOp.getConf().getPartColumns().isEmpty()) {
+        notValidTableScanOps.add(tsOp);
+      }
+    }
+    // 2) TS with DPP.
+    Collection<Operator<? extends OperatorDesc>> tableScanOps =
+            Lists.<Operator<?>>newArrayList(topOps.values());
+    Set<AppMasterEventOperator> s =
+            OperatorUtils.findOperators(tableScanOps, AppMasterEventOperator.class);
+    for (AppMasterEventOperator a : s) {
+      if (a.getConf() instanceof DynamicPruningEventDesc) {
+        DynamicPruningEventDesc dped = (DynamicPruningEventDesc) a.getConf();
+        notValidTableScanOps.add(dped.getTableScan());
+        optimizerCache.tableScanToDPPSource.put(dped.getTableScan(), a);
+      }
+    }
+    // 3) TS with semijoin DPP.
+    for (Entry<ReduceSinkOperator, SemiJoinBranchInfo> e
+            : pctx.getRsToSemiJoinBranchInfo().entrySet()) {
+      notValidTableScanOps.add(e.getValue().getTsOp());
+      optimizerCache.tableScanToDPPSource.put(e.getValue().getTsOp(), e.getKey());
+    }
+    return notValidTableScanOps;
+  }
+
+  private static Multimap<String, Entry<String, TableScanOperator>> splitTableScanOpsByTable(
+          ParseContext pctx) {
+    Multimap<String, Entry<String, TableScanOperator>> tableNameToOps = ArrayListMultimap.create();
+    for (Entry<String, TableScanOperator> e : pctx.getTopOps().entrySet()) {
+      TableScanOperator tsOp = e.getValue();
+      tableNameToOps.put(
+              tsOp.getConf().getTableMetadata().getDbName() + "."
+                      + tsOp.getConf().getTableMetadata().getTableName(), e);
+    }
+    return tableNameToOps;
+  }
+
+  private static List<Entry<String, Long>> rankTablesByAccumulatedSize(ParseContext pctx,
+          Set<TableScanOperator> excludeTables) {
+    Map<String, Long> tableToTotalSize = new HashMap<>();
+    for (Entry<String, TableScanOperator> e : pctx.getTopOps().entrySet()) {
+      TableScanOperator tsOp = e.getValue();
+      if (excludeTables.contains(tsOp)) {
+        // Skip operator, currently we do not merge
+        continue;
+      }
+      String tableName = tsOp.getConf().getTableMetadata().getDbName() + "."
+              + tsOp.getConf().getTableMetadata().getTableName();
+      long tableSize = tsOp.getStatistics() != null ?
+              tsOp.getStatistics().getDataSize() : 0L;
+      Long totalSize = tableToTotalSize.get(tableName);
+      if (totalSize != null) {
+        tableToTotalSize.put(tableName,
+                StatsUtils.safeAdd(totalSize, tableSize));
+      } else {
+        tableToTotalSize.put(tableName, tableSize);
+      }
+    }
+    List<Entry<String, Long>> sortedTables =
+            new LinkedList<>(tableToTotalSize.entrySet());
+    Collections.sort(sortedTables, Collections.reverseOrder(
+            new Comparator<Map.Entry<String, Long>>() {
+              public int compare(Map.Entry<String, Long> o1, Map.Entry<String, Long> o2) {
+                return (o1.getValue()).compareTo(o2.getValue());
+              }
+            }));
+    return sortedTables;
+  }
+
+  private static boolean validPreConditions(ParseContext pctx, SharedScanOptimizerCache optimizerCache,
+          TableScanOperator prevTsOp, TableScanOperator tsOp) {
+    // 1) The set of operators in the works of the TS operators need to meet
+    // some requirements. In particular:
+    // 1.1. None of the works that contain the TS operators can contain a Union
+    // operator. This is not supported yet as we might end up with cycles in
+    // the Tez DAG.
+    // 1.2. There cannot be more than one DummyStore operator in the new resulting
+    // work when the TS operators are merged. This is due to an assumption in
+    // MergeJoinProc that needs to be further explored.
+    // If any of these conditions are not met, we cannot merge.
+    // TODO: Extend the rule so it can be applied to these cases.
+    final Set<Operator<?>> workOps1 = findWorkOperators(optimizerCache, prevTsOp);
+    final Set<Operator<?>> workOps2 = findWorkOperators(optimizerCache, tsOp);
+    boolean foundDummyStoreOp = false;
+    for (Operator<?> op : workOps1) {
+      if (op instanceof UnionOperator) {
+        // We cannot merge (1.1)
+        return false;
+      }
+      if (op instanceof DummyStoreOperator) {
+        foundDummyStoreOp = true;
+      }
+    }
+    for (Operator<?> op : workOps2) {
+      if (op instanceof UnionOperator) {
+        // We cannot merge (1.1)
+        return false;
+      }
+      if (foundDummyStoreOp && op instanceof DummyStoreOperator) {
+        // We cannot merge (1.2)
+        return false;
+      }
+    }
+    // 2) We check whether the output works will collide when we merge the operators.
+    //
+    //   Work1   Work2    (merge TS in W1 & W2)        Work1
+    //       \   /                  ->                  | |       X
+    //       Work3                                     Work3
+    //
+    // If we do, we cannot merge. The reason is that Tez currently does
+    // not support parallel edges, i.e., multiple edges from same work x
+    // into same work y.
+    final Set<Operator<?>> outputWorksOps1 = findChildWorkOperators(pctx, optimizerCache, prevTsOp);
+    final Set<Operator<?>> outputWorksOps2 = findChildWorkOperators(pctx, optimizerCache, tsOp);
+    if (!Collections.disjoint(outputWorksOps1, outputWorksOps2)) {
+      // We cannot merge
+      return false;
+    }
+    // 3) We check whether we will end up with the same operators providing input to the same work.
+    //
+    //       Work1        (merge TS in W2 & W3)        Work1
+    //       /   \                  ->                  | |       X
+    //   Work2   Work3                                 Work2
+    //
+    // If we do, we cannot merge. The reason is the same as above: Tez
+    // currently does not support parallel edges.
+    final Set<Operator<?>> inputWorksOps1 = findParentWorkOperators(pctx, optimizerCache, prevTsOp);
+    final Set<Operator<?>> inputWorksOps2 = findParentWorkOperators(pctx, optimizerCache, tsOp);
+    if (!Collections.disjoint(inputWorksOps1, inputWorksOps2)) {
+      // We cannot merge
+      return false;
+    }
+    // 4) We check whether one of the operators is part of a work that is an input for
+    // the work of the other operator.
+    //
+    //   Work1            (merge TS in W1 & W3)        Work1
+    //     |                        ->                   |        X
+    //   Work2                                         Work2
+    //     |                                             |
+    //   Work3                                         Work1
+    //
+    // If we do, we cannot merge, as we would end up with a cycle in the DAG.
+    final Set<Operator<?>> descendantWorksOps1 =
+            findDescendantWorkOperators(pctx, optimizerCache, prevTsOp);
+    final Set<Operator<?>> descendantWorksOps2 =
+            findDescendantWorkOperators(pctx, optimizerCache, tsOp);
+    if (!Collections.disjoint(descendantWorksOps1, workOps2)
+            || !Collections.disjoint(workOps1, descendantWorksOps2)) {
+      return false;
+    }
+    // 5) We check whether merging the works would cause the size of
+    // the data in memory to grow too large.
+    // TODO: Currently ignores GBY and PTF which may also buffer data in memory.
+    final Set<Operator<?>> newWorkOps = workOps1;
+    newWorkOps.addAll(workOps2);
+    long dataSize = 0L;
+    for (Operator<?> op : newWorkOps) {
+      if (op instanceof MapJoinOperator) {
+        MapJoinOperator mop = (MapJoinOperator) op;
+        dataSize = StatsUtils.safeAdd(dataSize, mop.getConf().getInMemoryDataSize());
+        if (dataSize > mop.getConf().getNoConditionalTaskSize()) {
+          // Size surpasses the limit, we cannot merge
+          LOG.debug("accumulated data size: {} / max size: {}",
+                  dataSize, mop.getConf().getNoConditionalTaskSize());
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
+  private static Set<Operator<?>> findParentWorkOperators(ParseContext pctx,
+          SharedScanOptimizerCache optimizerCache, Operator<?> start) {
+    // Find operators in work
+    Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start);
+    // Gather input works operators
+    Set<Operator<?>> set = new HashSet<Operator<?>>();
+    for (Operator<?> op : workOps) {
+      if (op.getParentOperators() != null) {
+        for (Operator<?> parent : op.getParentOperators()) {
+          if (parent instanceof ReduceSinkOperator) {
+            set.addAll(findWorkOperators(optimizerCache, parent));
+          }
+        }
+      } else if (op instanceof TableScanOperator) {
+        // Check for DPP and semijoin DPP
+        for (Operator<?> parent : optimizerCache.tableScanToDPPSource.get((TableScanOperator) op)) {
+          set.addAll(findWorkOperators(optimizerCache, parent));
+        }
+      }
+    }
+    return set;
+  }
+
+  private static Set<Operator<?>> findChildWorkOperators(ParseContext pctx,
+          SharedScanOptimizerCache optimizerCache, Operator<?> start) {
+    // Find operators in work
+    Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start);
+    // Gather output works operators
+    Set<Operator<?>> set = new HashSet<Operator<?>>();
+    for (Operator<?> op : workOps) {
+      if (op instanceof ReduceSinkOperator) {
+        if (op.getChildOperators() != null) {
+          // All children of RS are descendants
+          for (Operator<?> child : op.getChildOperators()) {
+            set.addAll(findWorkOperators(optimizerCache, child));
+          }
+        }
+        // Semijoin DPP work is considered a child because this work needs
+        // to finish before it can execute
+        SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op);
+        if (sjbi != null) {
+          set.addAll(findWorkOperators(optimizerCache, sjbi.getTsOp()));
+        }
+      } else if(op.getConf() instanceof DynamicPruningEventDesc) {
+        // DPP work is considered a child because this work needs
+        // to finish before it can execute
+        set.addAll(findWorkOperators(
+                optimizerCache, ((DynamicPruningEventDesc) op.getConf()).getTableScan()));
+      }
+    }
+    return set;
+  }
+
+  private static Set<Operator<?>> findDescendantWorkOperators(ParseContext pctx,
+          SharedScanOptimizerCache optimizerCache, Operator<?> start) {
+    // Find operators in work
+    Set<Operator<?>> workOps = findWorkOperators(optimizerCache, start);
+    // Gather output works operators
+    Set<Operator<?>> result = new HashSet<Operator<?>>();
+    Set<Operator<?>> set;
+    while (!workOps.isEmpty()) {
+      set = new HashSet<Operator<?>>();
+      for (Operator<?> op : workOps) {
+        if (op instanceof ReduceSinkOperator) {
+          if (op.getChildOperators() != null) {
+            // All children of RS are descendants
+            for (Operator<?> child : op.getChildOperators()) {
+              set.addAll(findWorkOperators(optimizerCache, child));
+            }
+          }
+          // Semijoin DPP work is considered a descendant because work needs
+          // to finish for it to execute
+          SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op);
+          if (sjbi != null) {
+            set.addAll(findWorkOperators(optimizerCache, sjbi.getTsOp()));
+          }
+        } else if(op.getConf() instanceof DynamicPruningEventDesc) {
+          // DPP work is considered a descendant because work needs
+          // to finish for it to execute
+          set.addAll(findWorkOperators(
+                  optimizerCache, ((DynamicPruningEventDesc) op.getConf()).getTableScan()));
+        }
+      }
+      workOps = set;
+      result.addAll(set);
+    }
+    return result;
+  }
+
+  // Stores result in cache
+  private static Set<Operator<?>> findWorkOperators(
+          SharedScanOptimizerCache optimizerCache, Operator<?> start) {
+    Set<Operator<?>> c = optimizerCache.operatorToWorkOperators.get(start);
+    if (!c.isEmpty()) {
+      return c;
+    }
+    c = findWorkOperators(start, new HashSet<Operator<?>>());
+    for (Operator<?> op : c) {
+      optimizerCache.operatorToWorkOperators.putAll(op, c);
+    }
+    return c;
+  }
+
+  private static Set<Operator<?>> findWorkOperators(Operator<?> start, Set<Operator<?>> found) {
+    found.add(start);
+    if (start.getParentOperators() != null) {
+      for (Operator<?> parent : start.getParentOperators()) {
+        if (parent instanceof ReduceSinkOperator) {
+          continue;
+        }
+        if (!found.contains(parent)) {
+          findWorkOperators(parent, found);
+        }
+      }
+    }
+    if (start instanceof ReduceSinkOperator) {
+      return found;
+    }
+    if (start.getChildOperators() != null) {
+      for (Operator<?> child : start.getChildOperators()) {
+        if (!found.contains(child)) {
+          findWorkOperators(child, found);
+        }
+      }
+    }
+    return found;
+  }
+
+  private static void pushFilterToTopOfTableScan(
+          SharedScanOptimizerCache optimizerCache, TableScanOperator tsOp)
+                  throws UDFArgumentException {
+    ExprNodeGenericFuncDesc tableScanExprNode = tsOp.getConf().getFilterExpr();
+    List<Operator<? extends OperatorDesc>> allChildren =
+            Lists.newArrayList(tsOp.getChildOperators());
+    for (Operator<? extends OperatorDesc> op : allChildren) {
+      if (op instanceof FilterOperator) {
+        FilterOperator filterOp = (FilterOperator) op;
+        ExprNodeDesc filterExprNode  = filterOp.getConf().getPredicate();
+        if (tableScanExprNode.isSame(filterExprNode)) {
+          // We do not need to do anything
+          return;
+        }
+        if (tableScanExprNode.getGenericUDF() instanceof GenericUDFOPOr) {
+          for (ExprNodeDesc childExprNode : tableScanExprNode.getChildren()) {
+            if (childExprNode.isSame(filterExprNode)) {
+              // We do not need to do anything, it is in the OR expression
+              // so probably we pushed previously
+              return;
+            }
+          }
+        }
+        ExprNodeGenericFuncDesc newPred = ExprNodeGenericFuncDesc.newInstance(
+                new GenericUDFOPAnd(),
+                Arrays.<ExprNodeDesc>asList(tableScanExprNode.clone(), filterExprNode));
+        filterOp.getConf().setPredicate(newPred);
+      } else {
+        Operator<FilterDesc> newOp = OperatorFactory.get(tsOp.getCompilationOpContext(),
+                new FilterDesc(tableScanExprNode.clone(), false),
+                new RowSchema(tsOp.getSchema().getSignature()));
+        tsOp.replaceChild(op, newOp);
+        newOp.getParentOperators().add(tsOp);
+        op.replaceParent(tsOp, newOp);
+        newOp.getChildOperators().add(op);
+        // Add to cache (same group as tsOp)
+        optimizerCache.putIfWorkExists(newOp, tsOp);
+      }
+    }
+  }
+
+  /** Cache to accelerate optimization */
+  private static class SharedScanOptimizerCache {
+    // Operators that belong to each work
+    final HashMultimap<Operator<?>, Operator<?>> operatorToWorkOperators =
+            HashMultimap.<Operator<?>, Operator<?>>create();
+    // Table scan operators to DPP sources
+    final Multimap<TableScanOperator, Operator<?>> tableScanToDPPSource =
+            HashMultimap.<TableScanOperator, Operator<?>>create();
+
+    // Add new operator to cache work group of existing operator (if group exists)
+    void putIfWorkExists(Operator<?> opToAdd, Operator<?> existingOp) {
+      List<Operator<?>> c = ImmutableList.copyOf(operatorToWorkOperators.get(existingOp));
+      if (!c.isEmpty()) {
+        for (Operator<?> op : c) {
+          operatorToWorkOperators.get(op).add(opToAdd);
+        }
+        operatorToWorkOperators.putAll(opToAdd, c);
+        operatorToWorkOperators.put(opToAdd, opToAdd);
+      }
+    }
+
+    // Remove operator and combine
+    void removeOpAndCombineWork(Operator<?> opToRemove, Operator<?> replacementOp) {
+      Set<Operator<?>> s = operatorToWorkOperators.get(opToRemove);
+      s.remove(opToRemove);
+      List<Operator<?>> c1 = ImmutableList.copyOf(s);
+      List<Operator<?>> c2 = ImmutableList.copyOf(operatorToWorkOperators.get(replacementOp));
+      if (!c1.isEmpty() && !c2.isEmpty()) {
+        for (Operator<?> op1 : c1) {
+          operatorToWorkOperators.remove(op1, opToRemove); // Remove operator
+          operatorToWorkOperators.putAll(op1, c2); // Add ops of new collection
+        }
+        operatorToWorkOperators.removeAll(opToRemove); // Remove entry for operator
+        for (Operator<?> op2 : c2) {
+          operatorToWorkOperators.putAll(op2, c1); // Add ops to existing collection
+        }
+      }
+    }
+  }
+
+}

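A central helper in the new optimizer is findWorkOperators, which groups operators into the same work by walking parents and children without ever crossing a ReduceSink boundary: parent ReduceSinks are skipped, and traversal stops below a ReduceSink reached as a child. A toy, self-contained sketch of the same traversal on a plain node graph; the Node class is purely illustrative and is not one of Hive's operator classes:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class WorkGroupingSketch {
      // Toy DAG node; 'boundary' plays the role of a ReduceSinkOperator.
      static class Node {
        final String name;
        final boolean boundary;
        final List<Node> parents = new ArrayList<>();
        final List<Node> children = new ArrayList<>();
        Node(String name, boolean boundary) { this.name = name; this.boundary = boundary; }
      }

      // Same shape as findWorkOperators: recurse into non-boundary parents,
      // and only recurse into children if the current node is not a boundary.
      static Set<Node> findWork(Node start, Set<Node> found) {
        found.add(start);
        for (Node parent : start.parents) {
          if (!parent.boundary && !found.contains(parent)) {
            findWork(parent, found);
          }
        }
        if (start.boundary) {
          return found;
        }
        for (Node child : start.children) {
          if (!found.contains(child)) {
            findWork(child, found);
          }
        }
        return found;
      }

      public static void main(String[] args) {
        Node ts = new Node("TS", false);
        Node fil = new Node("FIL", false);
        Node rs = new Node("RS", true);
        Node gby = new Node("GBY", false);
        ts.children.add(fil);  fil.parents.add(ts);
        fil.children.add(rs);  rs.parents.add(fil);
        rs.children.add(gby);  gby.parents.add(rs);
        // Starting from TS: TS, FIL and the RS form one work; GBY beyond the RS does not.
        Set<Node> work = findWork(ts, new HashSet<Node>());
        System.out.println(work.size());  // prints 3
      }
    }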
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
index f78bd7c..53abb21 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
@@ -282,7 +282,7 @@ public final class GenMRSkewJoinProcessor {
           newJoinValues, newJoinValueTblDesc, newJoinValueTblDesc,joinDescriptor
           .getOutputColumnNames(), i, joinDescriptor.getConds(),
           joinDescriptor.getFilters(), joinDescriptor.getNoOuterJoin(), dumpFilePrefix,
-          joinDescriptor.getNoConditionalTaskSize());
+          joinDescriptor.getNoConditionalTaskSize(), joinDescriptor.getInMemoryDataSize());
       mapJoinDescriptor.setTagOrder(tags);
       mapJoinDescriptor.setHandleSkewJoin(false);
       mapJoinDescriptor.setNullSafes(joinDescriptor.getNullSafes());

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
index c970611..a5f0b2a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
@@ -241,7 +241,7 @@ public class GenSparkSkewJoinProcessor {
           newJoinValues, newJoinValueTblDesc, newJoinValueTblDesc, joinDescriptor
           .getOutputColumnNames(), i, joinDescriptor.getConds(),
           joinDescriptor.getFilters(), joinDescriptor.getNoOuterJoin(), dumpFilePrefix,
-          joinDescriptor.getNoConditionalTaskSize());
+          joinDescriptor.getNoConditionalTaskSize(), joinDescriptor.getInMemoryDataSize());
       mapJoinDescriptor.setTagOrder(tags);
       mapJoinDescriptor.setHandleSkewJoin(false);
       mapJoinDescriptor.setNullSafes(joinDescriptor.getNullSafes());

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
index c87de16..188590f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java
@@ -291,8 +291,14 @@ public class GenTezWork implements NodeProcessor {
               // of the downstream work
               for (ReduceSinkOperator r:
                      context.linkWorkWithReduceSinkMap.get(parentWork)) {
+                if (!context.mapJoinParentMap.get(mj).contains(r)) {
+                  // We might be visiting this twice because of the reuse of intermediate results.
+                  // If that is the case, we do not need to do anything because either we have
+                  // already connected this RS operator or we will connect it in a subsequent pass.
+                  continue;
+                }
                 if (r.getConf().getOutputName() != null) {
-                  LOG.debug("Cloning reduce sink for multi-child broadcast edge");
+                  LOG.debug("Cloning reduce sink " + r + " for multi-child broadcast edge");
                   // we've already set this one up. Need to clone for the next work.
                   r = (ReduceSinkOperator) OperatorFactory.getAndMakeChild(
                       r.getCompilationOpContext(), (ReduceSinkDesc)r.getConf().clone(),
@@ -370,7 +376,7 @@ public class GenTezWork implements NodeProcessor {
       long bytesPerReducer = context.conf.getLongVar(HiveConf.ConfVars.BYTESPERREDUCER);
 
       LOG.debug("Second pass. Leaf operator: "+operator
-        +" has common downstream work:"+followingWork);
+        +" has common downstream work: "+followingWork);
 
       if (operator instanceof DummyStoreOperator) {
         // this is the small table side.

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index f469cd2..7e156f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -17,26 +17,55 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import org.apache.hadoop.hive.ql.optimizer.physical.LlapClusterStateForCompile;
-
-import com.google.common.base.Preconditions;
 import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.hive.ql.exec.*;
-import org.apache.hadoop.hive.ql.lib.*;
-import org.apache.hadoop.hive.ql.plan.*;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBloomFilter.GenericUDAFBloomFilterEvaluator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
+import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
+import org.apache.hadoop.hive.ql.exec.ConditionalTask;
+import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator;
+import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.lib.CompositeProcessor;
+import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.ForwardWalker;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.lib.PreOrderOnceWalker;
+import org.apache.hadoop.hive.ql.lib.Rule;
+import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.optimizer.ConstantPropagate;
@@ -47,9 +76,11 @@ import org.apache.hadoop.hive.ql.optimizer.MergeJoinProc;
 import org.apache.hadoop.hive.ql.optimizer.ReduceSinkMapJoinProc;
 import org.apache.hadoop.hive.ql.optimizer.RemoveDynamicPruningBySize;
 import org.apache.hadoop.hive.ql.optimizer.SetReducerParallelism;
+import org.apache.hadoop.hive.ql.optimizer.SharedScanOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.metainfo.annotation.AnnotateWithOpTraits;
 import org.apache.hadoop.hive.ql.optimizer.physical.AnnotateRunTimeStatsOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.physical.CrossProductCheck;
+import org.apache.hadoop.hive.ql.optimizer.physical.LlapClusterStateForCompile;
 import org.apache.hadoop.hive.ql.optimizer.physical.LlapDecider;
 import org.apache.hadoop.hive.ql.optimizer.physical.LlapPreVectorizationPass;
 import org.apache.hadoop.hive.ql.optimizer.physical.MemoryDecider;
@@ -60,10 +91,25 @@ import org.apache.hadoop.hive.ql.optimizer.physical.SerializeFilter;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer;
 import org.apache.hadoop.hive.ql.optimizer.stats.annotation.AnnotateWithStatistics;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ColStatistics;
+import org.apache.hadoop.hive.ql.plan.DynamicPruningEventDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
+import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.MapWork;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.Statistics;
+import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBloomFilter.GenericUDAFBloomFilterEvaluator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * TezCompiler translates the operator plan into TezTasks.
@@ -92,6 +138,7 @@ public class TezCompiler extends TaskCompiler {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     // Create the context for the walker
     OptimizeTezProcContext procCtx = new OptimizeTezProcContext(conf, pCtx, inputs, outputs);
+
     perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.TEZ_COMPILER);
     // setup dynamic partition pruning where possible
     runDynamicPartitionPruning(procCtx, inputs, outputs);
@@ -136,6 +183,12 @@ public class TezCompiler extends TaskCompiler {
     runCycleAnalysisForPartitionPruning(procCtx, inputs, outputs);
     perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.TEZ_COMPILER, "Run cycle analysis for partition pruning");
 
+    perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.TEZ_COMPILER);
+    if(procCtx.conf.getBoolVar(ConfVars.HIVE_SHARED_SCAN_OPTIMIZATION)) {
+      new SharedScanOptimizer().transform(procCtx.parseContext);
+    }
+    perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.TEZ_COMPILER, "Shared scans optimization");
+
     // need a new run of the constant folding because we might have created lots
     // of "and true and true" conditions.
     // Rather than run the full constant folding just need to shortcut AND/OR expressions

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
index c4fb3f3..12e1ff5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
@@ -107,6 +107,9 @@ public class JoinDesc extends AbstractOperatorDesc {
   private transient boolean leftInputJoin;
   private transient List<String> streamAliases;
 
+  // represents the total memory that this Join operator will use if it is a MapJoin operator
+  protected transient long inMemoryDataSize;
+
   // non-transient field, used at runtime to kill a task if it exceeded memory limits when running in LLAP
   protected long noConditionalTaskSize;
 
@@ -202,6 +205,7 @@ public class JoinDesc extends AbstractOperatorDesc {
     this.residualFilterExprs = clone.residualFilterExprs;
     this.statistics = clone.statistics;
     this.noConditionalTaskSize = clone.noConditionalTaskSize;
+    this.inMemoryDataSize = clone.inMemoryDataSize;
   }
 
   public Map<Byte, List<ExprNodeDesc>> getExprs() {
@@ -696,4 +700,12 @@ public class JoinDesc extends AbstractOperatorDesc {
   public void setNoConditionalTaskSize(final long noConditionalTaskSize) {
     this.noConditionalTaskSize = noConditionalTaskSize;
   }
+
+  public long getInMemoryDataSize() {
+    return inMemoryDataSize;
+  }
+
+  public void setInMemoryDataSize(final long inMemoryDataSize) {
+    this.inMemoryDataSize = inMemoryDataSize;
+  }
 }

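The inMemoryDataSize field added above feeds precondition 5 in SharedScanOptimizer: the in-memory sizes of every MapJoin that would land in the merged work are accumulated and compared against the noconditional task size, and the merge is rejected once that budget is exceeded. A plain-Java sketch of the check; the patch uses StatsUtils.safeAdd for overflow-safe addition, while plain addition and invented sizes are used here for brevity:

    public class MapJoinMemoryBudgetSketch {
      // Accumulate MapJoin in-memory sizes and reject the merge once the
      // noconditional task size would be exceeded.
      static boolean withinBudget(long[] mapJoinInMemorySizes, long noConditionalTaskSize) {
        long dataSize = 0L;
        for (long size : mapJoinInMemorySizes) {
          dataSize += size;
          if (dataSize > noConditionalTaskSize) {
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        long budget = 400L << 20;                        // 400 MB noconditional task size
        long[] sizes = {300L << 20, 200L << 20};         // two map joins of 300 MB and 200 MB
        System.out.println(withinBudget(sizes, budget)); // prints false: merging is rejected
      }
    }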
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
index 8da85d2..f387e6a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
@@ -113,7 +113,7 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
     final List<TableDesc> valueTblDescs, final List<TableDesc> valueFilteredTblDescs, List<String> outputColumnNames,
     final int posBigTable, final JoinCondDesc[] conds,
     final Map<Byte, List<ExprNodeDesc>> filters, boolean noOuterJoin, String dumpFilePrefix,
-    final long noConditionalTaskSize) {
+    final long noConditionalTaskSize, final long inMemoryDataSize) {
     super(values, outputColumnNames, noOuterJoin, conds, filters, null, noConditionalTaskSize);
     vectorDesc = null;
     this.keys = keys;
@@ -123,6 +123,7 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
     this.posBigTable = posBigTable;
     this.bigTableBucketNumMapping = new LinkedHashMap<String, Integer>();
     this.dumpFilePrefix = dumpFilePrefix;
+    this.inMemoryDataSize = inMemoryDataSize;
     initRetainExprList();
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/queries/clientpositive/perf/query88.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/perf/query88.q b/ql/src/test/queries/clientpositive/perf/query88.q
index 2be814e..bb6ef6d 100644
--- a/ql/src/test/queries/clientpositive/perf/query88.q
+++ b/ql/src/test/queries/clientpositive/perf/query88.q
@@ -1,3 +1,5 @@
+set hive.strict.checks.cartesian.product=false;
+
 explain
 select  *
 from

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/auto_join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/auto_join0.q.out b/ql/src/test/results/clientpositive/llap/auto_join0.q.out
index cba6001..6d051ea 100644
--- a/ql/src/test/results/clientpositive/llap/auto_join0.q.out
+++ b/ql/src/test/results/clientpositive/llap/auto_join0.q.out
@@ -30,10 +30,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -52,13 +52,6 @@ STAGE PLANS:
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key < 10) (type: boolean)
                     Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
@@ -87,7 +80,7 @@ STAGE PLANS:
                     1 
                   outputColumnNames: _col0, _col1, _col2, _col3
                   input vertices:
-                    1 Reducer 6
+                    1 Reducer 5
                   Statistics: Num rows: 27556 Data size: 9809936 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
@@ -124,7 +117,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/auto_join30.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/auto_join30.q.out b/ql/src/test/results/clientpositive/llap/auto_join30.q.out
index a26db55..cc59c5c 100644
--- a/ql/src/test/results/clientpositive/llap/auto_join30.q.out
+++ b/ql/src/test/results/clientpositive/llap/auto_join30.q.out
@@ -457,9 +457,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (SIMPLE_EDGE), Reducer 2 (BROADCAST_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Reducer 4 <- Map 3 (SIMPLE_EDGE), Reducer 2 (BROADCAST_EDGE), Reducer 6 (BROADCAST_EDGE)
         Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -497,13 +497,6 @@ STAGE PLANS:
                         sort order: +
                         Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -548,7 +541,7 @@ STAGE PLANS:
                   outputColumnNames: _col2, _col3
                   input vertices:
                     0 Reducer 2
-                    2 Reducer 7
+                    2 Reducer 6
                   Statistics: Num rows: 2974 Data size: 529372 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: sum(hash(_col2,_col3))
@@ -574,7 +567,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -650,10 +643,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -685,13 +678,6 @@ STAGE PLANS:
                       sort order: +
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -765,7 +751,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -841,10 +827,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -876,13 +862,6 @@ STAGE PLANS:
                       sort order: +
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -956,7 +935,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1032,10 +1011,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1067,13 +1046,6 @@ STAGE PLANS:
                       sort order: +
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1147,7 +1119,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1223,10 +1195,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1258,13 +1230,6 @@ STAGE PLANS:
                       sort order: +
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1338,7 +1303,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_9.q.out b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_9.q.out
index b69d0bd..bdb30d7 100644
--- a/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_9.q.out
+++ b/ql/src/test/results/clientpositive/llap/auto_sortmerge_join_9.q.out
@@ -475,7 +475,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Map 1 <- Map 3 (BROADCAST_EDGE)
-        Map 4 <- Map 6 (BROADCAST_EDGE)
+        Map 4 <- Map 3 (BROADCAST_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
@@ -533,6 +533,18 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
         Map 4 
@@ -555,7 +567,7 @@ STAGE PLANS:
                           1 _col0 (type: int)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 6
+                          1 Map 3
                         Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
                           aggregations: count()
@@ -571,25 +583,6 @@ STAGE PLANS:
                             value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: int)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -2306,7 +2299,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Map 1 <- Map 3 (BROADCAST_EDGE)
-        Map 4 <- Map 6 (BROADCAST_EDGE)
+        Map 4 <- Map 3 (BROADCAST_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
@@ -2364,6 +2357,18 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
         Map 4 
@@ -2386,7 +2391,7 @@ STAGE PLANS:
                           1 _col0 (type: int)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 6
+                          1 Map 3
                         Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
                           aggregations: count()
@@ -2402,25 +2407,6 @@ STAGE PLANS:
                             value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: int)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
index 964d058..042c60b 100644
--- a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez1.q.out
@@ -757,7 +757,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Map 1 <- Map 3 (CUSTOM_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -818,13 +818,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 242 Data size: 4502 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 242 Data size: 4502 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 242 Data size: 4502 Basic stats: COMPLETE Column stats: NONE
@@ -861,7 +854,7 @@ STAGE PLANS:
                       1 _col0 (type: int)
                     outputColumnNames: _col0, _col1, _col3
                     input vertices:
-                      1 Map 4
+                      1 Map 3
                     Statistics: Num rows: 302 Data size: 5633 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: int), _col0 (type: double), _col3 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out b/ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out
index b628cb1..291a1f2 100644
--- a/ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out
+++ b/ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out
@@ -1738,9 +1738,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1780,13 +1780,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: z
-                  Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1864,7 +1857,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1929,9 +1922,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1971,13 +1964,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: z
-                  Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2055,7 +2041,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator


[07/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
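
For context, the explain-plan diffs below show repeated scans of the same table being merged into a single shared TableScan vertex: the removed "Map N" vertices and the "Please refer to the previous TableScan [TS_x]" lines are the visible effect. A minimal, hypothetical HiveQL sketch of the kind of query affected, assuming the standard src test table used throughout these result files:

  -- Both branches scan src; before HIVE-16602 the Tez DAG built two separate
  -- Map vertices for them, while with shared scans the plan reuses a single
  -- TableScan vertex and rewires the join edges to it (as in the diffs below).
  SELECT count(*)
  FROM (SELECT key FROM src WHERE key IS NOT NULL) a
  JOIN (SELECT key FROM src WHERE key IS NOT NULL) b
    ON a.key = b.key;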
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query92.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query92.q.out b/ql/src/test/results/clientpositive/perf/query92.q.out
index a53c8e7..d3abc23 100644
--- a/ql/src/test/results/clientpositive/perf/query92.q.out
+++ b/ql/src/test/results/clientpositive/perf/query92.q.out
@@ -7,10 +7,10 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 8 <- Map 10 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
+Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -40,15 +40,6 @@ Stage-0
                           Output:["_col0","_col1"],keys:_col2, _col1
                           Merge Join Operator [MERGEJOIN_46] (rows=633595212 width=88)
                             Conds:RS_6._col0=RS_7._col0(Inner),Output:["_col1","_col2"]
-                          <-Map 1 [SIMPLE_EDGE]
-                            SHUFFLE [RS_6]
-                              PartitionCols:_col0
-                              Select Operator [SEL_2] (rows=575995635 width=88)
-                                Output:["_col0","_col1","_col2"]
-                                Filter Operator [FIL_42] (rows=575995635 width=88)
-                                  predicate:ss_sold_date_sk is not null
-                                  TableScan [TS_0] (rows=575995635 width=88)
-                                    default@store_sales,ss,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk"]
                           <-Map 6 [SIMPLE_EDGE]
                             SHUFFLE [RS_7]
                               PartitionCols:_col0
@@ -58,28 +49,36 @@ Stage-0
                                   predicate:((d_month_seq >= 1206) and (d_month_seq <= 1217) and d_date_sk is not null)
                                   TableScan [TS_3] (rows=73049 width=1119)
                                     default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
-                <-Reducer 9 [SIMPLE_EDGE]
+                          <-Map 1 [SIMPLE_EDGE]
+                            SHUFFLE [RS_6]
+                              PartitionCols:_col0
+                              Select Operator [SEL_2] (rows=575995635 width=88)
+                                Output:["_col0","_col1","_col2"]
+                                Filter Operator [FIL_42] (rows=575995635 width=88)
+                                  predicate:ss_sold_date_sk is not null
+                                  TableScan [TS_0] (rows=575995635 width=88)
+                                    default@store_sales,ss,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk"]
+                <-Reducer 8 [SIMPLE_EDGE]
                   SHUFFLE [RS_29]
                     PartitionCols:_col0, _col1
                     Group By Operator [GBY_26] (rows=158394413 width=135)
                       Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                    <-Reducer 8 [SIMPLE_EDGE]
+                    <-Reducer 7 [SIMPLE_EDGE]
                       SHUFFLE [RS_25]
                         PartitionCols:_col0, _col1
                         Group By Operator [GBY_24] (rows=316788826 width=135)
                           Output:["_col0","_col1"],keys:_col1, _col2
                           Merge Join Operator [MERGEJOIN_47] (rows=316788826 width=135)
                             Conds:RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col2"]
-                          <-Map 10 [SIMPLE_EDGE]
+                          <-Map 6 [SIMPLE_EDGE]
                             SHUFFLE [RS_21]
                               PartitionCols:_col0
                               Select Operator [SEL_19] (rows=8116 width=1119)
                                 Output:["_col0"]
                                 Filter Operator [FIL_45] (rows=8116 width=1119)
                                   predicate:((d_month_seq >= 1206) and (d_month_seq <= 1217) and d_date_sk is not null)
-                                  TableScan [TS_17] (rows=73049 width=1119)
-                                    default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
-                          <-Map 7 [SIMPLE_EDGE]
+                                   Please refer to the previous TableScan [TS_3]
+                          <-Map 9 [SIMPLE_EDGE]
                             SHUFFLE [RS_20]
                               PartitionCols:_col0
                               Select Operator [SEL_16] (rows=287989836 width=135)

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query95.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query95.q.out b/ql/src/test/results/clientpositive/perf/query95.q.out
index 9b0d1b2..fa94d08 100644
--- a/ql/src/test/results/clientpositive/perf/query95.q.out
+++ b/ql/src/test/results/clientpositive/perf/query95.q.out
@@ -5,14 +5,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
-Reducer 12 <- Map 14 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-Reducer 3 <- Map 15 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 16 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 17 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 10 <- Map 12 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 3 <- Map 13 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 5 <- Map 15 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 8 <- Map 7 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
+Reducer 8 <- Map 11 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 9 <- Map 11 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -28,7 +28,7 @@ Stage-0
               Output:["_col0","_col1","_col2","_col3"],aggregations:["count(DISTINCT _col3)","sum(_col4)","sum(_col5)"],keys:_col3
               Merge Join Operator [MERGEJOIN_122] (rows=510219083 width=135)
                 Conds:RS_55._col2=RS_56._col0(Inner),Output:["_col3","_col4","_col5"]
-              <-Map 17 [SIMPLE_EDGE]
+              <-Map 15 [SIMPLE_EDGE]
                 SHUFFLE [RS_56]
                   PartitionCols:_col0
                   Select Operator [SEL_40] (rows=42 width=1850)
@@ -42,7 +42,7 @@ Stage-0
                   PartitionCols:_col2
                   Merge Join Operator [MERGEJOIN_121] (rows=463835520 width=135)
                     Conds:RS_52._col1=RS_53._col0(Inner),Output:["_col2","_col3","_col4","_col5"]
-                  <-Map 16 [SIMPLE_EDGE]
+                  <-Map 14 [SIMPLE_EDGE]
                     SHUFFLE [RS_53]
                       PartitionCols:_col0
                       Select Operator [SEL_37] (rows=20000000 width=1014)
@@ -56,7 +56,7 @@ Stage-0
                       PartitionCols:_col1
                       Merge Join Operator [MERGEJOIN_120] (rows=421668646 width=135)
                         Conds:RS_49._col0=RS_50._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col5"]
-                      <-Map 15 [SIMPLE_EDGE]
+                      <-Map 13 [SIMPLE_EDGE]
                         SHUFFLE [RS_50]
                           PartitionCols:_col0
                           Select Operator [SEL_34] (rows=8116 width=1119)
@@ -79,7 +79,7 @@ Stage-0
                                   predicate:(ws_ship_addr_sk is not null and ws_web_site_sk is not null and ws_ship_date_sk is not null and ws_order_number is not null)
                                   TableScan [TS_0] (rows=144002668 width=135)
                                     default@web_sales,ws1,Tbl:COMPLETE,Col:NONE,Output:["ws_ship_date_sk","ws_ship_addr_sk","ws_web_site_sk","ws_order_number","ws_ext_ship_cost","ws_net_profit"]
-                          <-Reducer 12 [SIMPLE_EDGE]
+                          <-Reducer 10 [SIMPLE_EDGE]
                             SHUFFLE [RS_47]
                               PartitionCols:_col0
                               Group By Operator [GBY_44] (rows=174243235 width=135)
@@ -88,7 +88,7 @@ Stage-0
                                   Output:["_col0"]
                                   Merge Join Operator [MERGEJOIN_118] (rows=174243235 width=135)
                                     Conds:RS_28._col0=RS_29._col0(Inner),Output:["_col1"]
-                                  <-Map 14 [SIMPLE_EDGE]
+                                  <-Map 12 [SIMPLE_EDGE]
                                     SHUFFLE [RS_29]
                                       PartitionCols:_col0
                                       Select Operator [SEL_27] (rows=14398467 width=92)
@@ -97,7 +97,7 @@ Stage-0
                                           predicate:wr_order_number is not null
                                           TableScan [TS_25] (rows=14398467 width=92)
                                             default@web_returns,wr,Tbl:COMPLETE,Col:NONE,Output:["wr_order_number"]
-                                  <-Reducer 11 [SIMPLE_EDGE]
+                                  <-Reducer 9 [SIMPLE_EDGE]
                                     SHUFFLE [RS_28]
                                       PartitionCols:_col0
                                       Select Operator [SEL_24] (rows=158402938 width=135)
@@ -106,24 +106,24 @@ Stage-0
                                           predicate:(_col0 <> _col2)
                                           Merge Join Operator [MERGEJOIN_117] (rows=158402938 width=135)
                                             Conds:RS_20._col1=RS_21._col1(Inner),Output:["_col0","_col1","_col2"]
-                                          <-Map 10 [SIMPLE_EDGE]
-                                            SHUFFLE [RS_20]
-                                              PartitionCols:_col1
-                                              Select Operator [SEL_16] (rows=144002668 width=135)
-                                                Output:["_col0","_col1"]
-                                                Filter Operator [FIL_110] (rows=144002668 width=135)
-                                                  predicate:ws_order_number is not null
-                                                  TableScan [TS_14] (rows=144002668 width=135)
-                                                    default@web_sales,ws4,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
-                                          <-Map 13 [SIMPLE_EDGE]
+                                          <-Map 11 [SIMPLE_EDGE]
                                             SHUFFLE [RS_21]
                                               PartitionCols:_col1
                                               Select Operator [SEL_19] (rows=144002668 width=135)
                                                 Output:["_col0","_col1"]
                                                 Filter Operator [FIL_111] (rows=144002668 width=135)
                                                   predicate:ws_order_number is not null
-                                                  TableScan [TS_17] (rows=144002668 width=135)
-                                                    default@web_sales,ws5,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
+                                                  TableScan [TS_6] (rows=144002668 width=135)
+                                                    default@web_sales,ws3,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
+                                          <-Map 7 [SIMPLE_EDGE]
+                                            SHUFFLE [RS_20]
+                                              PartitionCols:_col1
+                                              Select Operator [SEL_16] (rows=144002668 width=135)
+                                                Output:["_col0","_col1"]
+                                                Filter Operator [FIL_110] (rows=144002668 width=135)
+                                                  predicate:ws_order_number is not null
+                                                  TableScan [TS_3] (rows=144002668 width=135)
+                                                    default@web_sales,ws2,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
                           <-Reducer 8 [SIMPLE_EDGE]
                             SHUFFLE [RS_46]
                               PartitionCols:_col0
@@ -135,6 +135,14 @@ Stage-0
                                     predicate:(_col0 <> _col2)
                                     Merge Join Operator [MERGEJOIN_116] (rows=158402938 width=135)
                                       Conds:RS_9._col1=RS_10._col1(Inner),Output:["_col0","_col1","_col2"]
+                                    <-Map 11 [SIMPLE_EDGE]
+                                      SHUFFLE [RS_10]
+                                        PartitionCols:_col1
+                                        Select Operator [SEL_8] (rows=144002668 width=135)
+                                          Output:["_col0","_col1"]
+                                          Filter Operator [FIL_109] (rows=144002668 width=135)
+                                            predicate:ws_order_number is not null
+                                             Please refer to the previous TableScan [TS_6]
                                     <-Map 7 [SIMPLE_EDGE]
                                       SHUFFLE [RS_9]
                                         PartitionCols:_col1
@@ -142,15 +150,5 @@ Stage-0
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_108] (rows=144002668 width=135)
                                             predicate:ws_order_number is not null
-                                            TableScan [TS_3] (rows=144002668 width=135)
-                                              default@web_sales,ws2,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
-                                    <-Map 9 [SIMPLE_EDGE]
-                                      SHUFFLE [RS_10]
-                                        PartitionCols:_col1
-                                        Select Operator [SEL_8] (rows=144002668 width=135)
-                                          Output:["_col0","_col1"]
-                                          Filter Operator [FIL_109] (rows=144002668 width=135)
-                                            predicate:ws_order_number is not null
-                                            TableScan [TS_6] (rows=144002668 width=135)
-                                              default@web_sales,ws3,Tbl:COMPLETE,Col:NONE,Output:["ws_warehouse_sk","ws_order_number"]
+                                             Please refer to the previous TableScan [TS_3]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query97.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query97.q.out b/ql/src/test/results/clientpositive/perf/query97.q.out
index 54152e9..ef9a791 100644
--- a/ql/src/test/results/clientpositive/perf/query97.q.out
+++ b/ql/src/test/results/clientpositive/perf/query97.q.out
@@ -7,10 +7,10 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 8 <- Map 10 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
+Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -42,15 +42,6 @@ Stage-0
                             Output:["_col0","_col1"],keys:_col2, _col1
                             Merge Join Operator [MERGEJOIN_47] (rows=633595212 width=88)
                               Conds:RS_6._col0=RS_7._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 1 [SIMPLE_EDGE]
-                              SHUFFLE [RS_6]
-                                PartitionCols:_col0
-                                Select Operator [SEL_2] (rows=575995635 width=88)
-                                  Output:["_col0","_col1","_col2"]
-                                  Filter Operator [FIL_43] (rows=575995635 width=88)
-                                    predicate:ss_sold_date_sk is not null
-                                    TableScan [TS_0] (rows=575995635 width=88)
-                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk"]
                             <-Map 6 [SIMPLE_EDGE]
                               SHUFFLE [RS_7]
                                 PartitionCols:_col0
@@ -60,28 +51,36 @@ Stage-0
                                     predicate:(d_month_seq BETWEEN 1193 AND 1204 and d_date_sk is not null)
                                     TableScan [TS_3] (rows=73049 width=1119)
                                       default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
-                  <-Reducer 9 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
+                              SHUFFLE [RS_6]
+                                PartitionCols:_col0
+                                Select Operator [SEL_2] (rows=575995635 width=88)
+                                  Output:["_col0","_col1","_col2"]
+                                  Filter Operator [FIL_43] (rows=575995635 width=88)
+                                    predicate:ss_sold_date_sk is not null
+                                    TableScan [TS_0] (rows=575995635 width=88)
+                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk"]
+                  <-Reducer 8 [SIMPLE_EDGE]
                     SHUFFLE [RS_29]
                       PartitionCols:_col0, _col1
                       Group By Operator [GBY_26] (rows=158394413 width=135)
                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                      <-Reducer 8 [SIMPLE_EDGE]
+                      <-Reducer 7 [SIMPLE_EDGE]
                         SHUFFLE [RS_25]
                           PartitionCols:_col0, _col1
                           Group By Operator [GBY_24] (rows=316788826 width=135)
                             Output:["_col0","_col1"],keys:_col1, _col2
                             Merge Join Operator [MERGEJOIN_48] (rows=316788826 width=135)
                               Conds:RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col2"]
-                            <-Map 10 [SIMPLE_EDGE]
+                            <-Map 6 [SIMPLE_EDGE]
                               SHUFFLE [RS_21]
                                 PartitionCols:_col0
                                 Select Operator [SEL_19] (rows=8116 width=1119)
                                   Output:["_col0"]
                                   Filter Operator [FIL_46] (rows=8116 width=1119)
                                     predicate:(d_month_seq BETWEEN 1193 AND 1204 and d_date_sk is not null)
-                                    TableScan [TS_17] (rows=73049 width=1119)
-                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
-                            <-Map 7 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_3]
+                            <-Map 9 [SIMPLE_EDGE]
                               SHUFFLE [RS_20]
                                 PartitionCols:_col0
                                 Select Operator [SEL_16] (rows=287989836 width=135)

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/tez/explainanalyze_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_2.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_2.q.out
index ea572cd..f6844c4 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_2.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_2.q.out
@@ -42,15 +42,15 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 11 <- Union 12 (CONTAINS)
-Map 16 <- Union 12 (CONTAINS)
+Map 13 <- Union 14 (CONTAINS)
+Map 16 <- Union 14 (CONTAINS)
 Map 8 <- Union 2 (CONTAINS)
-Reducer 13 <- Union 12 (SIMPLE_EDGE)
-Reducer 14 <- Map 17 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Map 18 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 10 <- Map 9 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 11 <- Map 17 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 15 <- Union 14 (SIMPLE_EDGE)
 Reducer 3 <- Union 2 (SIMPLE_EDGE)
 Reducer 4 <- Map 9 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 10 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE)
 
 Stage-0
@@ -62,14 +62,14 @@ Stage-0
         Group By Operator [GBY_54] (rows=28/15 width=177)
           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
         <-Union 6 [SIMPLE_EDGE]
-          <-Reducer 15 [CONTAINS]
+          <-Reducer 11 [CONTAINS]
             Reduce Output Operator [RS_53]
               PartitionCols:_col0, _col1
               Select Operator [SEL_49] (rows=148/61 width=177)
                 Output:["_col0","_col1"]
                 Merge Join Operator [MERGEJOIN_82] (rows=148/61 width=177)
                   Conds:RS_46._col2=RS_47._col0(Inner),Output:["_col1","_col2"]
-                <-Map 18 [SIMPLE_EDGE]
+                <-Map 17 [SIMPLE_EDGE]
                   SHUFFLE [RS_47]
                     PartitionCols:_col0
                     Select Operator [SEL_42] (rows=500/500 width=87)
@@ -78,29 +78,29 @@ Stage-0
                         predicate:key is not null
                         TableScan [TS_40] (rows=500/500 width=87)
                           default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
-                <-Reducer 14 [SIMPLE_EDGE]
+                <-Reducer 10 [SIMPLE_EDGE]
                   SHUFFLE [RS_46]
                     PartitionCols:_col2
                     Merge Join Operator [MERGEJOIN_81] (rows=61/52 width=177)
                       Conds:RS_43._col1=RS_44._col1(Inner),Output:["_col1","_col2"]
-                    <-Map 17 [SIMPLE_EDGE]
+                    <-Map 9 [SIMPLE_EDGE]
                       SHUFFLE [RS_44]
                         PartitionCols:_col1
                         Select Operator [SEL_39] (rows=25/25 width=175)
                           Output:["_col0","_col1"]
                           Filter Operator [FIL_77] (rows=25/25 width=175)
                             predicate:(key is not null and value is not null)
-                            TableScan [TS_37] (rows=25/25 width=175)
+                            TableScan [TS_12] (rows=25/25 width=175)
                               default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                    <-Reducer 13 [SIMPLE_EDGE]
+                    <-Reducer 15 [SIMPLE_EDGE]
                       SHUFFLE [RS_43]
                         PartitionCols:_col1
                         Select Operator [SEL_36] (rows=525/319 width=178)
                           Output:["_col1"]
                           Group By Operator [GBY_35] (rows=525/319 width=178)
                             Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                          <-Union 12 [SIMPLE_EDGE]
-                            <-Map 11 [CONTAINS]
+                          <-Union 14 [SIMPLE_EDGE]
+                            <-Map 13 [CONTAINS]
                               Reduce Output Operator [RS_34]
                                 PartitionCols:_col1, _col0
                                 Select Operator [SEL_27] (rows=25/25 width=175)
@@ -125,7 +125,7 @@ Stage-0
                 Output:["_col0","_col1"]
                 Merge Join Operator [MERGEJOIN_80] (rows=148/61 width=177)
                   Conds:RS_21._col2=RS_22._col0(Inner),Output:["_col1","_col2"]
-                <-Map 10 [SIMPLE_EDGE]
+                <-Map 12 [SIMPLE_EDGE]
                   SHUFFLE [RS_22]
                     PartitionCols:_col0
                     Select Operator [SEL_17] (rows=500/500 width=87)
@@ -146,8 +146,7 @@ Stage-0
                           Output:["_col0","_col1"]
                           Filter Operator [FIL_73] (rows=25/25 width=175)
                             predicate:(key is not null and value is not null)
-                            TableScan [TS_12] (rows=25/25 width=175)
-                              default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                             Please refer to the previous TableScan [TS_12]
                     <-Reducer 3 [SIMPLE_EDGE]
                       SHUFFLE [RS_18]
                         PartitionCols:_col1
@@ -236,25 +235,25 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
 Map 10 <- Union 2 (CONTAINS)
-Map 13 <- Union 14 (CONTAINS)
-Map 20 <- Union 14 (CONTAINS)
-Map 21 <- Union 16 (CONTAINS)
-Map 24 <- Union 25 (CONTAINS)
-Map 33 <- Union 25 (CONTAINS)
-Map 34 <- Union 27 (CONTAINS)
-Map 35 <- Union 29 (CONTAINS)
-Reducer 15 <- Union 14 (SIMPLE_EDGE), Union 16 (CONTAINS)
-Reducer 17 <- Union 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 22 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Map 23 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 26 <- Union 25 (SIMPLE_EDGE), Union 27 (CONTAINS)
-Reducer 28 <- Union 27 (SIMPLE_EDGE), Union 29 (CONTAINS)
+Map 17 <- Union 18 (CONTAINS)
+Map 22 <- Union 18 (CONTAINS)
+Map 23 <- Union 20 (CONTAINS)
+Map 25 <- Union 26 (CONTAINS)
+Map 32 <- Union 26 (CONTAINS)
+Map 33 <- Union 28 (CONTAINS)
+Map 34 <- Union 30 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 13 <- Map 24 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 14 <- Map 11 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
+Reducer 15 <- Map 16 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 19 <- Union 18 (SIMPLE_EDGE), Union 20 (CONTAINS)
+Reducer 21 <- Union 20 (SIMPLE_EDGE)
+Reducer 27 <- Union 26 (SIMPLE_EDGE), Union 28 (CONTAINS)
+Reducer 29 <- Union 28 (SIMPLE_EDGE), Union 30 (CONTAINS)
 Reducer 3 <- Union 2 (SIMPLE_EDGE)
-Reducer 30 <- Union 29 (SIMPLE_EDGE)
-Reducer 31 <- Map 36 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
-Reducer 32 <- Map 37 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 31 <- Union 30 (SIMPLE_EDGE)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 5 <- Map 16 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE), Union 8 (CONTAINS)
 Reducer 9 <- Union 8 (SIMPLE_EDGE)
 
@@ -267,45 +266,45 @@ Stage-0
         Group By Operator [GBY_112] (rows=872/15 width=177)
           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
         <-Union 8 [SIMPLE_EDGE]
-          <-Reducer 32 [CONTAINS]
+          <-Reducer 15 [CONTAINS]
             Reduce Output Operator [RS_111]
               PartitionCols:_col0, _col1
               Select Operator [SEL_107] (rows=434/61 width=177)
                 Output:["_col0","_col1"]
                 Merge Join Operator [MERGEJOIN_162] (rows=434/61 width=177)
                   Conds:RS_104._col2=RS_105._col0(Inner),Output:["_col2","_col5"]
-                <-Map 37 [SIMPLE_EDGE]
+                <-Map 16 [SIMPLE_EDGE]
                   SHUFFLE [RS_105]
                     PartitionCols:_col0
                     Select Operator [SEL_100] (rows=500/500 width=178)
                       Output:["_col0","_col1"]
                       Filter Operator [FIL_156] (rows=500/500 width=178)
                         predicate:key is not null
-                        TableScan [TS_98] (rows=500/500 width=178)
+                        TableScan [TS_15] (rows=500/500 width=178)
                           default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                <-Reducer 31 [SIMPLE_EDGE]
+                <-Reducer 14 [SIMPLE_EDGE]
                   SHUFFLE [RS_104]
                     PartitionCols:_col2
                     Merge Join Operator [MERGEJOIN_161] (rows=178/52 width=86)
                       Conds:RS_101._col1=RS_102._col1(Inner),Output:["_col2"]
-                    <-Map 36 [SIMPLE_EDGE]
+                    <-Map 11 [SIMPLE_EDGE]
                       SHUFFLE [RS_102]
                         PartitionCols:_col1
                         Select Operator [SEL_97] (rows=25/25 width=175)
                           Output:["_col0","_col1"]
                           Filter Operator [FIL_155] (rows=25/25 width=175)
                             predicate:(key is not null and value is not null)
-                            TableScan [TS_95] (rows=25/25 width=175)
+                            TableScan [TS_12] (rows=25/25 width=175)
                               default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                    <-Reducer 30 [SIMPLE_EDGE]
+                    <-Reducer 31 [SIMPLE_EDGE]
                       SHUFFLE [RS_101]
                         PartitionCols:_col1
                         Select Operator [SEL_94] (rows=1525/319 width=178)
                           Output:["_col1"]
                           Group By Operator [GBY_93] (rows=1525/319 width=178)
                             Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                          <-Union 29 [SIMPLE_EDGE]
-                            <-Map 35 [CONTAINS]
+                          <-Union 30 [SIMPLE_EDGE]
+                            <-Map 34 [CONTAINS]
                               Reduce Output Operator [RS_92]
                                 PartitionCols:_col1, _col0
                                 Select Operator [SEL_88] (rows=500/500 width=178)
@@ -314,15 +313,15 @@ Stage-0
                                     predicate:value is not null
                                     TableScan [TS_86] (rows=500/500 width=178)
                                       Output:["key","value"]
-                            <-Reducer 28 [CONTAINS]
+                            <-Reducer 29 [CONTAINS]
                               Reduce Output Operator [RS_92]
                                 PartitionCols:_col1, _col0
                                 Select Operator [SEL_85] (rows=1025/319 width=178)
                                   Output:["_col0","_col1"]
                                   Group By Operator [GBY_84] (rows=1025/319 width=178)
                                     Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                  <-Union 27 [SIMPLE_EDGE]
-                                    <-Map 34 [CONTAINS]
+                                  <-Union 28 [SIMPLE_EDGE]
+                                    <-Map 33 [CONTAINS]
                                       Reduce Output Operator [RS_83]
                                         PartitionCols:_col1, _col0
                                         Select Operator [SEL_79] (rows=500/500 width=178)
@@ -331,15 +330,15 @@ Stage-0
                                             predicate:value is not null
                                             TableScan [TS_77] (rows=500/500 width=178)
                                               Output:["key","value"]
-                                    <-Reducer 26 [CONTAINS]
+                                    <-Reducer 27 [CONTAINS]
                                       Reduce Output Operator [RS_83]
                                         PartitionCols:_col1, _col0
                                         Select Operator [SEL_76] (rows=525/319 width=178)
                                           Output:["_col0","_col1"]
                                           Group By Operator [GBY_75] (rows=525/319 width=178)
                                             Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                          <-Union 25 [SIMPLE_EDGE]
-                                            <-Map 24 [CONTAINS]
+                                          <-Union 26 [SIMPLE_EDGE]
+                                            <-Map 25 [CONTAINS]
                                               Reduce Output Operator [RS_74]
                                                 PartitionCols:_col1, _col0
                                                 Select Operator [SEL_67] (rows=25/25 width=175)
@@ -348,7 +347,7 @@ Stage-0
                                                     predicate:value is not null
                                                     TableScan [TS_65] (rows=25/25 width=175)
                                                       Output:["key","value"]
-                                            <-Map 33 [CONTAINS]
+                                            <-Map 32 [CONTAINS]
                                               Reduce Output Operator [RS_74]
                                                 PartitionCols:_col1, _col0
                                                 Select Operator [SEL_70] (rows=500/500 width=178)
@@ -363,14 +362,14 @@ Stage-0
               Group By Operator [GBY_63] (rows=438/15 width=177)
                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
               <-Union 6 [SIMPLE_EDGE]
-                <-Reducer 19 [CONTAINS]
+                <-Reducer 13 [CONTAINS]
                   Reduce Output Operator [RS_62]
                     PartitionCols:_col0, _col1
                     Select Operator [SEL_58] (rows=290/61 width=177)
                       Output:["_col0","_col1"]
                       Merge Join Operator [MERGEJOIN_160] (rows=290/61 width=177)
                         Conds:RS_55._col2=RS_56._col0(Inner),Output:["_col2","_col5"]
-                      <-Map 23 [SIMPLE_EDGE]
+                      <-Map 24 [SIMPLE_EDGE]
                         SHUFFLE [RS_56]
                           PartitionCols:_col0
                           Select Operator [SEL_51] (rows=500/500 width=178)
@@ -379,29 +378,28 @@ Stage-0
                               predicate:key is not null
                               TableScan [TS_49] (rows=500/500 width=178)
                                 default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                      <-Reducer 18 [SIMPLE_EDGE]
+                      <-Reducer 12 [SIMPLE_EDGE]
                         SHUFFLE [RS_55]
                           PartitionCols:_col2
                           Merge Join Operator [MERGEJOIN_159] (rows=119/52 width=86)
                             Conds:RS_52._col1=RS_53._col1(Inner),Output:["_col2"]
-                          <-Map 22 [SIMPLE_EDGE]
+                          <-Map 11 [SIMPLE_EDGE]
                             SHUFFLE [RS_53]
                               PartitionCols:_col1
                               Select Operator [SEL_48] (rows=25/25 width=175)
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_149] (rows=25/25 width=175)
                                   predicate:(key is not null and value is not null)
-                                  TableScan [TS_46] (rows=25/25 width=175)
-                                    default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                          <-Reducer 17 [SIMPLE_EDGE]
+                                   Please refer to the previous TableScan [TS_12]
+                          <-Reducer 21 [SIMPLE_EDGE]
                             SHUFFLE [RS_52]
                               PartitionCols:_col1
                               Select Operator [SEL_45] (rows=1025/319 width=178)
                                 Output:["_col1"]
                                 Group By Operator [GBY_44] (rows=1025/319 width=178)
                                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                <-Union 16 [SIMPLE_EDGE]
-                                  <-Map 21 [CONTAINS]
+                                <-Union 20 [SIMPLE_EDGE]
+                                  <-Map 23 [CONTAINS]
                                     Reduce Output Operator [RS_43]
                                       PartitionCols:_col1, _col0
                                       Select Operator [SEL_39] (rows=500/500 width=178)
@@ -410,15 +408,15 @@ Stage-0
                                           predicate:value is not null
                                           TableScan [TS_37] (rows=500/500 width=178)
                                             Output:["key","value"]
-                                  <-Reducer 15 [CONTAINS]
+                                  <-Reducer 19 [CONTAINS]
                                     Reduce Output Operator [RS_43]
                                       PartitionCols:_col1, _col0
                                       Select Operator [SEL_36] (rows=525/319 width=178)
                                         Output:["_col0","_col1"]
                                         Group By Operator [GBY_35] (rows=525/319 width=178)
                                           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                        <-Union 14 [SIMPLE_EDGE]
-                                          <-Map 13 [CONTAINS]
+                                        <-Union 18 [SIMPLE_EDGE]
+                                          <-Map 17 [CONTAINS]
                                             Reduce Output Operator [RS_34]
                                               PartitionCols:_col1, _col0
                                               Select Operator [SEL_27] (rows=25/25 width=175)
@@ -427,7 +425,7 @@ Stage-0
                                                   predicate:value is not null
                                                   TableScan [TS_25] (rows=25/25 width=175)
                                                     Output:["key","value"]
-                                          <-Map 20 [CONTAINS]
+                                          <-Map 22 [CONTAINS]
                                             Reduce Output Operator [RS_34]
                                               PartitionCols:_col1, _col0
                                               Select Operator [SEL_30] (rows=500/500 width=178)
@@ -443,15 +441,14 @@ Stage-0
                       Output:["_col0","_col1"]
                       Merge Join Operator [MERGEJOIN_158] (rows=148/61 width=177)
                         Conds:RS_21._col2=RS_22._col0(Inner),Output:["_col2","_col5"]
-                      <-Map 12 [SIMPLE_EDGE]
+                      <-Map 16 [SIMPLE_EDGE]
                         SHUFFLE [RS_22]
                           PartitionCols:_col0
                           Select Operator [SEL_17] (rows=500/500 width=178)
                             Output:["_col0","_col1"]
                             Filter Operator [FIL_145] (rows=500/500 width=178)
                               predicate:key is not null
-                              TableScan [TS_15] (rows=500/500 width=178)
-                                default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                               Please refer to the previous TableScan [TS_15]
                       <-Reducer 4 [SIMPLE_EDGE]
                         SHUFFLE [RS_21]
                           PartitionCols:_col2
@@ -464,8 +461,7 @@ Stage-0
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_144] (rows=25/25 width=175)
                                   predicate:(key is not null and value is not null)
-                                  TableScan [TS_12] (rows=25/25 width=175)
-                                    default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                   Please refer to the previous TableScan [TS_12]
                           <-Reducer 3 [SIMPLE_EDGE]
                             SHUFFLE [RS_18]
                               PartitionCols:_col1
@@ -1127,20 +1123,20 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 13 <- Union 10 (CONTAINS)
-Map 14 <- Union 10 (CONTAINS)
-Map 21 <- Union 22 (CONTAINS)
-Map 23 <- Union 22 (CONTAINS)
-Map 24 <- Union 22 (CONTAINS)
-Map 25 <- Union 22 (CONTAINS)
+Map 13 <- Union 14 (CONTAINS)
+Map 15 <- Union 14 (CONTAINS)
+Map 16 <- Union 14 (CONTAINS)
+Map 19 <- Union 20 (CONTAINS)
+Map 21 <- Union 20 (CONTAINS)
+Map 22 <- Union 20 (CONTAINS)
+Map 23 <- Union 20 (CONTAINS)
 Map 6 <- Union 2 (CONTAINS)
-Map 9 <- Union 10 (CONTAINS)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Union 10 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 18 <- Map 17 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
-Reducer 19 <- Reducer 18 (SIMPLE_EDGE), Union 22 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 11 <- Map 10 (SIMPLE_EDGE), Union 14 (SIMPLE_EDGE)
+Reducer 12 <- Map 17 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 3 <- Map 7 (SIMPLE_EDGE), Union 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 8 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 4 <- Map 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 8 <- Map 18 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 8 (SIMPLE_EDGE), Union 20 (SIMPLE_EDGE), Union 5 (CONTAINS)
 
 Stage-5
   Stats-Aggr Operator
@@ -1158,7 +1154,7 @@ Stage-5
                     Output:["_col0","_col1"]
                     Merge Join Operator [MERGEJOIN_126] (rows=5839/5421 width=178)
                       Conds:RS_42._col1=RS_43._col0(Inner),Output:["_col1","_col4"]
-                    <-Map 16 [SIMPLE_EDGE]
+                    <-Map 17 [SIMPLE_EDGE]
                       SHUFFLE [RS_43]
                         PartitionCols:_col0
                         Select Operator [SEL_38] (rows=500/500 width=178)
@@ -1171,27 +1167,36 @@ Stage-5
                       SHUFFLE [RS_42]
                         PartitionCols:_col1
                         Merge Join Operator [MERGEJOIN_125] (rows=2394/2097 width=87)
-                          Conds:Union 10._col0=RS_40._col1(Inner),Output:["_col1"]
-                        <-Map 15 [SIMPLE_EDGE]
+                          Conds:Union 14._col0=RS_40._col1(Inner),Output:["_col1"]
+                        <-Map 10 [SIMPLE_EDGE]
                           SHUFFLE [RS_40]
                             PartitionCols:_col1
                             Select Operator [SEL_35] (rows=500/500 width=178)
                               Output:["_col0","_col1"]
                               Filter Operator [FIL_115] (rows=500/500 width=178)
                                 predicate:(key is not null and value is not null)
-                                TableScan [TS_33] (rows=500/500 width=178)
-                                  default@src,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                        <-Union 10 [SIMPLE_EDGE]
+                                TableScan [TS_11] (rows=500/500 width=178)
+                                  default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                        <-Union 14 [SIMPLE_EDGE]
                           <-Map 13 [CONTAINS]
                             Reduce Output Operator [RS_39]
                               PartitionCols:_col0
+                              Select Operator [SEL_23] (rows=25/25 width=89)
+                                Output:["_col0"]
+                                Filter Operator [FIL_112] (rows=25/25 width=89)
+                                  predicate:value is not null
+                                  TableScan [TS_21] (rows=25/25 width=89)
+                                    Output:["value"]
+                          <-Map 15 [CONTAINS]
+                            Reduce Output Operator [RS_39]
+                              PartitionCols:_col0
                               Select Operator [SEL_26] (rows=500/500 width=91)
                                 Output:["_col0"]
                                 Filter Operator [FIL_113] (rows=500/500 width=91)
                                   predicate:value is not null
                                   TableScan [TS_24] (rows=500/500 width=91)
                                     Output:["value"]
-                          <-Map 14 [CONTAINS]
+                          <-Map 16 [CONTAINS]
                             Reduce Output Operator [RS_39]
                               PartitionCols:_col0
                               Select Operator [SEL_31] (rows=500/500 width=91)
@@ -1200,43 +1205,87 @@ Stage-5
                                   predicate:value is not null
                                   TableScan [TS_29] (rows=500/500 width=91)
                                     Output:["value"]
-                          <-Map 9 [CONTAINS]
-                            Reduce Output Operator [RS_39]
+                File Output Operator [FS_81]
+                  table:{"name:":"default.b"}
+                   Please refer to the previous Select Operator [SEL_45]
+                File Output Operator [FS_83]
+                  table:{"name:":"default.c"}
+                   Please refer to the previous Select Operator [SEL_45]
+              <-Reducer 4 [CONTAINS]
+                File Output Operator [FS_79]
+                  table:{"name:":"default.a"}
+                  Select Operator [SEL_20] (rows=148/170 width=177)
+                    Output:["_col0","_col1"]
+                    Merge Join Operator [MERGEJOIN_124] (rows=148/170 width=177)
+                      Conds:RS_17._col1=RS_18._col0(Inner),Output:["_col1","_col4"]
+                    <-Map 10 [SIMPLE_EDGE]
+                      SHUFFLE [RS_18]
+                        PartitionCols:_col0
+                        Select Operator [SEL_13] (rows=500/500 width=178)
+                          Output:["_col0","_col1"]
+                          Filter Operator [FIL_111] (rows=500/500 width=178)
+                            predicate:key is not null
+                             Please refer to the previous TableScan [TS_11]
+                    <-Reducer 3 [SIMPLE_EDGE]
+                      SHUFFLE [RS_17]
+                        PartitionCols:_col1
+                        Merge Join Operator [MERGEJOIN_123] (rows=61/108 width=86)
+                          Conds:Union 2._col0=RS_15._col1(Inner),Output:["_col1"]
+                        <-Map 7 [SIMPLE_EDGE]
+                          SHUFFLE [RS_15]
+                            PartitionCols:_col1
+                            Select Operator [SEL_10] (rows=25/25 width=175)
+                              Output:["_col0","_col1"]
+                              Filter Operator [FIL_110] (rows=25/25 width=175)
+                                predicate:(key is not null and value is not null)
+                                TableScan [TS_8] (rows=25/25 width=175)
+                                  default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                        <-Union 2 [SIMPLE_EDGE]
+                          <-Map 1 [CONTAINS]
+                            Reduce Output Operator [RS_14]
                               PartitionCols:_col0
-                              Select Operator [SEL_23] (rows=25/25 width=89)
+                              Select Operator [SEL_2] (rows=25/25 width=89)
                                 Output:["_col0"]
-                                Filter Operator [FIL_112] (rows=25/25 width=89)
+                                Filter Operator [FIL_108] (rows=25/25 width=89)
                                   predicate:value is not null
-                                  TableScan [TS_21] (rows=25/25 width=89)
+                                  TableScan [TS_0] (rows=25/25 width=89)
+                                    Output:["value"]
+                          <-Map 6 [CONTAINS]
+                            Reduce Output Operator [RS_14]
+                              PartitionCols:_col0
+                              Select Operator [SEL_5] (rows=500/500 width=91)
+                                Output:["_col0"]
+                                Filter Operator [FIL_109] (rows=500/500 width=91)
+                                  predicate:value is not null
+                                  TableScan [TS_3] (rows=500/500 width=91)
                                     Output:["value"]
                 File Output Operator [FS_81]
                   table:{"name:":"default.b"}
-                   Please refer to the previous Select Operator [SEL_45]
+                   Please refer to the previous Select Operator [SEL_20]
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
-                   Please refer to the previous Select Operator [SEL_45]
-              <-Reducer 19 [CONTAINS]
+                   Please refer to the previous Select Operator [SEL_20]
+              <-Reducer 9 [CONTAINS]
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
                   Select Operator [SEL_76] (rows=313/820 width=175)
                     Output:["_col0","_col1"]
                     Merge Join Operator [MERGEJOIN_128] (rows=313/820 width=175)
-                      Conds:RS_73._col1=Union 22._col0(Inner),Output:["_col0","_col3"]
-                    <-Reducer 18 [SIMPLE_EDGE]
+                      Conds:RS_73._col1=Union 20._col0(Inner),Output:["_col0","_col3"]
+                    <-Reducer 8 [SIMPLE_EDGE]
                       SHUFFLE [RS_73]
                         PartitionCols:_col1
                         Merge Join Operator [MERGEJOIN_127] (rows=44/115 width=264)
                           Conds:RS_70._col0=RS_71._col0(Inner),Output:["_col0","_col1","_col3"]
-                        <-Map 17 [SIMPLE_EDGE]
+                        <-Map 7 [SIMPLE_EDGE]
                           SHUFFLE [RS_70]
                             PartitionCols:_col0
                             Select Operator [SEL_50] (rows=25/25 width=175)
                               Output:["_col0","_col1"]
                               Filter Operator [FIL_117] (rows=25/25 width=175)
                                 predicate:(key is not null and value is not null)
-                                TableScan [TS_48] (rows=25/25 width=175)
-                                  default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                        <-Map 20 [SIMPLE_EDGE]
+                                 Please refer to the previous TableScan [TS_8]
+                        <-Map 18 [SIMPLE_EDGE]
                           SHUFFLE [RS_71]
                             PartitionCols:_col0
                             Select Operator [SEL_53] (rows=25/25 width=175)
@@ -1245,8 +1294,8 @@ Stage-5
                                 predicate:key is not null
                                 TableScan [TS_51] (rows=25/25 width=175)
                                   default@src1,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                    <-Union 22 [SIMPLE_EDGE]
-                      <-Map 21 [CONTAINS]
+                    <-Union 20 [SIMPLE_EDGE]
+                      <-Map 19 [CONTAINS]
                         Reduce Output Operator [RS_74]
                           PartitionCols:_col0
                           Select Operator [SEL_56] (rows=25/25 width=89)
@@ -1255,7 +1304,7 @@ Stage-5
                               predicate:value is not null
                               TableScan [TS_54] (rows=25/25 width=89)
                                 Output:["value"]
-                      <-Map 23 [CONTAINS]
+                      <-Map 21 [CONTAINS]
                         Reduce Output Operator [RS_74]
                           PartitionCols:_col0
                           Select Operator [SEL_59] (rows=500/500 width=91)
@@ -1264,7 +1313,7 @@ Stage-5
                               predicate:value is not null
                               TableScan [TS_57] (rows=500/500 width=91)
                                 Output:["value"]
-                      <-Map 24 [CONTAINS]
+                      <-Map 22 [CONTAINS]
                         Reduce Output Operator [RS_74]
                           PartitionCols:_col0
                           Select Operator [SEL_64] (rows=500/500 width=91)
@@ -1273,7 +1322,7 @@ Stage-5
                               predicate:value is not null
                               TableScan [TS_62] (rows=500/500 width=91)
                                 Output:["value"]
-                      <-Map 25 [CONTAINS]
+                      <-Map 23 [CONTAINS]
                         Reduce Output Operator [RS_74]
                           PartitionCols:_col0
                           Select Operator [SEL_68] (rows=500/500 width=91)
@@ -1288,61 +1337,6 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_76]
-              <-Reducer 4 [CONTAINS]
-                File Output Operator [FS_79]
-                  table:{"name:":"default.a"}
-                  Select Operator [SEL_20] (rows=148/170 width=177)
-                    Output:["_col0","_col1"]
-                    Merge Join Operator [MERGEJOIN_124] (rows=148/170 width=177)
-                      Conds:RS_17._col1=RS_18._col0(Inner),Output:["_col1","_col4"]
-                    <-Map 8 [SIMPLE_EDGE]
-                      SHUFFLE [RS_18]
-                        PartitionCols:_col0
-                        Select Operator [SEL_13] (rows=500/500 width=178)
-                          Output:["_col0","_col1"]
-                          Filter Operator [FIL_111] (rows=500/500 width=178)
-                            predicate:key is not null
-                            TableScan [TS_11] (rows=500/500 width=178)
-                              default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                    <-Reducer 3 [SIMPLE_EDGE]
-                      SHUFFLE [RS_17]
-                        PartitionCols:_col1
-                        Merge Join Operator [MERGEJOIN_123] (rows=61/108 width=86)
-                          Conds:Union 2._col0=RS_15._col1(Inner),Output:["_col1"]
-                        <-Map 7 [SIMPLE_EDGE]
-                          SHUFFLE [RS_15]
-                            PartitionCols:_col1
-                            Select Operator [SEL_10] (rows=25/25 width=175)
-                              Output:["_col0","_col1"]
-                              Filter Operator [FIL_110] (rows=25/25 width=175)
-                                predicate:(key is not null and value is not null)
-                                TableScan [TS_8] (rows=25/25 width=175)
-                                  default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                        <-Union 2 [SIMPLE_EDGE]
-                          <-Map 1 [CONTAINS]
-                            Reduce Output Operator [RS_14]
-                              PartitionCols:_col0
-                              Select Operator [SEL_2] (rows=25/25 width=89)
-                                Output:["_col0"]
-                                Filter Operator [FIL_108] (rows=25/25 width=89)
-                                  predicate:value is not null
-                                  TableScan [TS_0] (rows=25/25 width=89)
-                                    Output:["value"]
-                          <-Map 6 [CONTAINS]
-                            Reduce Output Operator [RS_14]
-                              PartitionCols:_col0
-                              Select Operator [SEL_5] (rows=500/500 width=91)
-                                Output:["_col0"]
-                                Filter Operator [FIL_109] (rows=500/500 width=91)
-                                  predicate:value is not null
-                                  TableScan [TS_3] (rows=500/500 width=91)
-                                    Output:["value"]
-                File Output Operator [FS_81]
-                  table:{"name:":"default.b"}
-                   Please refer to the previous Select Operator [SEL_20]
-                File Output Operator [FS_83]
-                  table:{"name:":"default.c"}
-                   Please refer to the previous Select Operator [SEL_20]
 Stage-6
   Stats-Aggr Operator
     Stage-1
@@ -1433,25 +1427,25 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
 Map 10 <- Union 2 (CONTAINS)
-Map 13 <- Union 14 (CONTAINS)
-Map 20 <- Union 14 (CONTAINS)
-Map 21 <- Union 16 (CONTAINS)
-Map 28 <- Union 29 (CONTAINS)
-Map 35 <- Union 29 (CONTAINS)
-Map 36 <- Union 31 (CONTAINS)
-Map 37 <- Union 33 (CONTAINS)
-Reducer 15 <- Union 14 (SIMPLE_EDGE), Union 16 (CONTAINS)
-Reducer 17 <- Union 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 22 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Map 23 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 25 <- Map 24 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE)
-Reducer 26 <- Reducer 25 (SIMPLE_EDGE), Reducer 34 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Map 17 <- Union 18 (CONTAINS)
+Map 22 <- Union 18 (CONTAINS)
+Map 23 <- Union 20 (CONTAINS)
+Map 26 <- Union 27 (CONTAINS)
+Map 33 <- Union 27 (CONTAINS)
+Map 34 <- Union 29 (CONTAINS)
+Map 35 <- Union 31 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 25 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Reducer 32 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 15 <- Map 14 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 16 <- Map 24 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 19 <- Union 18 (SIMPLE_EDGE), Union 20 (CONTAINS)
+Reducer 21 <- Union 20 (SIMPLE_EDGE)
+Reducer 28 <- Union 27 (SIMPLE_EDGE), Union 29 (CONTAINS)
 Reducer 3 <- Union 2 (SIMPLE_EDGE)
 Reducer 30 <- Union 29 (SIMPLE_EDGE), Union 31 (CONTAINS)
-Reducer 32 <- Union 31 (SIMPLE_EDGE), Union 33 (CONTAINS)
-Reducer 34 <- Union 33 (SIMPLE_EDGE)
+Reducer 32 <- Union 31 (SIMPLE_EDGE)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 5 <- Map 14 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE), Union 8 (CONTAINS)
 Reducer 9 <- Union 8 (SIMPLE_EDGE)
 
@@ -1469,28 +1463,28 @@ Stage-5
                 Group By Operator [GBY_112] (rows=6300/319 width=178)
                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                 <-Union 8 [SIMPLE_EDGE]
-                  <-Reducer 26 [CONTAINS]
+                  <-Reducer 13 [CONTAINS]
                     Reduce Output Operator [RS_111]
                       PartitionCols:_col0, _col1
                       Select Operator [SEL_107] (rows=313/304 width=175)
                         Output:["_col0","_col1"]
                         Merge Join Operator [MERGEJOIN_164] (rows=313/304 width=175)
                           Conds:RS_104._col1=RS_105._col1(Inner),Output:["_col0","_col3"]
-                        <-Reducer 25 [SIMPLE_EDGE]
+                        <-Reducer 12 [SIMPLE_EDGE]
                           SHUFFLE [RS_104]
                             PartitionCols:_col1
                             Merge Join Operator [MERGEJOIN_163] (rows=44/115 width=264)
                               Conds:RS_101._col0=RS_102._col0(Inner),Output:["_col0","_col1","_col3"]
-                            <-Map 24 [SIMPLE_EDGE]
+                            <-Map 11 [SIMPLE_EDGE]
                               SHUFFLE [RS_101]
                                 PartitionCols:_col0
                                 Select Operator [SEL_67] (rows=25/25 width=175)
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_153] (rows=25/25 width=175)
                                     predicate:(key is not null and value is not null)
-                                    TableScan [TS_65] (rows=25/25 width=175)
+                                    TableScan [TS_12] (rows=25/25 width=175)
                                       default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                            <-Map 27 [SIMPLE_EDGE]
+                            <-Map 25 [SIMPLE_EDGE]
                               SHUFFLE [RS_102]
                                 PartitionCols:_col0
                                 Select Operator [SEL_70] (rows=25/25 width=175)
@@ -1499,15 +1493,15 @@ Stage-5
                                     predicate:key is not null
                                     TableScan [TS_68] (rows=25/25 width=175)
                                       default@src1,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                        <-Reducer 34 [SIMPLE_EDGE]
+                        <-Reducer 32 [SIMPLE_EDGE]
                           SHUFFLE [RS_105]
                             PartitionCols:_col1
                             Select Operator [SEL_100] (rows=1525/319 width=178)
                               Output:["_col1"]
                               Group By Operator [GBY_99] (rows=1525/319 width=178)
                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                              <-Union 33 [SIMPLE_EDGE]
-                                <-Map 37 [CONTAINS]
+                              <-Union 31 [SIMPLE_EDGE]
+                                <-Map 35 [CONTAINS]
                                   Reduce Output Operator [RS_98]
                                     PartitionCols:_col1, _col0
                                     Select Operator [SEL_94] (rows=500/500 width=178)
@@ -1516,15 +1510,15 @@ Stage-5
                                         predicate:value is not null
                                         TableScan [TS_92] (rows=500/500 width=178)
                                           Output:["key","value"]
-                                <-Reducer 32 [CONTAINS]
+                                <-Reducer 30 [CONTAINS]
                                   Reduce Output Operator [RS_98]
                                     PartitionCols:_col1, _col0
                                     Select Operator [SEL_91] (rows=1025/319 width=178)
                                       Output:["_col0","_col1"]
                                       Group By Operator [GBY_90] (rows=1025/319 width=178)
                                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                      <-Union 31 [SIMPLE_EDGE]
-                                        <-Map 36 [CONTAINS]
+                                      <-Union 29 [SIMPLE_EDGE]
+                                        <-Map 34 [CONTAINS]
                                           Reduce Output Operator [RS_89]
                                             PartitionCols:_col1, _col0
                                             Select Operator [SEL_85] (rows=500/500 width=178)
@@ -1533,15 +1527,15 @@ Stage-5
                                                 predicate:value is not null
                                                 TableScan [TS_83] (rows=500/500 width=178)
                                                   Output:["key","value"]
-                                        <-Reducer 30 [CONTAINS]
+                                        <-Reducer 28 [CONTAINS]
                                           Reduce Output Operator [RS_89]
                                             PartitionCols:_col1, _col0
                                             Select Operator [SEL_82] (rows=525/319 width=178)
                                               Output:["_col0","_col1"]
                                               Group By Operator [GBY_81] (rows=525/319 width=178)
                                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                              <-Union 29 [SIMPLE_EDGE]
-                                                <-Map 28 [CONTAINS]
+                                              <-Union 27 [SIMPLE_EDGE]
+                                                <-Map 26 [CONTAINS]
                                                   Reduce Output Operator [RS_80]
                                                     PartitionCols:_col1, _col0
                                                     Select Operator [SEL_73] (rows=25/25 width=175)
@@ -1550,7 +1544,7 @@ Stage-5
                                                         predicate:value is not null
                                                         TableScan [TS_71] (rows=25/25 width=175)
                                                           Output:["key","value"]
-                                                <-Map 35 [CONTAINS]
+                                                <-Map 33 [CONTAINS]
                                                   Reduce Output Operator [RS_80]
                                                     PartitionCols:_col1, _col0
                                                     Select Operator [SEL_76] (rows=500/500 width=178)
@@ -1565,14 +1559,14 @@ Stage-5
                       Group By Operator [GBY_63] (rows=5987/309 width=178)
                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                       <-Union 6 [SIMPLE_EDGE]
-                        <-Reducer 19 [CONTAINS]
+                        <-Reducer 16 [CONTAINS]
                           Reduce Output Operator [RS_62]
                             PartitionCols:_col0, _col1
                             Select Operator [SEL_58] (rows=5839/1056 width=178)
                               Output:["_col0","_col1"]
                               Merge Join Operator [MERGEJOIN_162] (rows=5839/1056 width=178)
                                 Conds:RS_55._col2=RS_56._col0(Inner),Output:["_col2","_col5"]
-                              <-Map 23 [SIMPLE_EDGE]
+                              <-Map 24 [SIMPLE_EDGE]
                                 SHUFFLE [RS_56]
                                   PartitionCols:_col0
                                   Select Operator [SEL_51] (rows=500/500 width=178)
@@ -1581,29 +1575,29 @@ Stage-5
                                       predicate:key is not null
                                       TableScan [TS_49] (rows=500/500 width=178)
                                         default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                              <-Reducer 18 [SIMPLE_EDGE]
+                              <-Reducer 15 [SIMPLE_EDGE]
                                 SHUFFLE [RS_55]
                                   PartitionCols:_col2
                                   Merge Join Operator [MERGEJOIN_161] (rows=2394/512 width=87)
                                     Conds:RS_52._col1=RS_53._col1(Inner),Output:["_col2"]
-                                  <-Map 22 [SIMPLE_EDGE]
+                                  <-Map 14 [SIMPLE_EDGE]
                                     SHUFFLE [RS_53]
                                       PartitionCols:_col1
                                       Select Operator [SEL_48] (rows=500/500 width=178)
                                         Output:["_col0","_col1"]
                                         Filter Operator [FIL_151] (rows=500/500 width=178)
                                           predicate:(key is not null and value is not null)
-                                          TableScan [TS_46] (rows=500/500 width=178)
-                                            default@src,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                                  <-Reducer 17 [SIMPLE_EDGE]
+                                          TableScan [TS_15] (rows=500/500 width=178)
+                                            default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                  <-Reducer 21 [SIMPLE_EDGE]
                                     SHUFFLE [RS_52]
                                       PartitionCols:_col1
                                       Select Operator [SEL_45] (rows=1025/319 width=178)
                                         Output:["_col1"]
                                         Group By Operator [GBY_44] (rows=1025/319 width=178)
                                           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                        <-Union 16 [SIMPLE_EDGE]
-                                          <-Map 21 [CONTAINS]
+                                        <-Union 20 [SIMPLE_EDGE]
+                                          <-Map 23 [CONTAINS]
                                             Reduce Output Operator [RS_43]
                                               PartitionCols:_col1, _col0
                                               Select Operator [SEL_39] (rows=500/500 width=178)
@@ -1612,15 +1606,15 @@ Stage-5
                                                   predicate:value is not null
                                                   TableScan [TS_37] (rows=500/500 width=178)
                                                     Output:["key","value"]
-                                          <-Reducer 15 [CONTAINS]
+                                          <-Reducer 19 [CONTAINS]
                                             Reduce Output Operator [RS_43]
                                               PartitionCols:_col1, _col0
                                               Select Operator [SEL_36] (rows=525/319 width=178)
                                                 Output:["_col0","_col1"]
                                                 Group By Operator [GBY_35] (rows=525/319 width=178)
                                                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                                <-Union 14 [SIMPLE_EDGE]
-                                                  <-Map 13 [CONTAINS]
+                                                <-Union 18 [SIMPLE_EDGE]
+                                                  <-Map 17 [CONTAINS]
                                                     Reduce Output Operator [RS_34]
                                                       PartitionCols:_col1, _col0
                                                       Select Operator [SEL_27] (rows=25/25 width=175)
@@ -1629,7 +1623,7 @@ Stage-5
                                                           predicate:value is not null
                                                           TableScan [TS_25] (rows=25/25 width=175)
                                                             Output:["key","value"]
-                                                  <-Map 20 [CONTAINS]
+                                                  <-Map 22 [CONTAINS]
                                                     Reduce Output Operator [RS_34]
                                                       PartitionCols:_col1, _col0
                                                       Select Operator [SEL_30] (rows=500/500 width=178)
@@ -1645,15 +1639,14 @@ Stage-5
                               Output:["_col0","_col1"]
                               Merge Join Operator [MERGEJOIN_160] (rows=148/61 width=177)
                                 Conds:RS_21._col2=RS_22._col0(Inner),Output:["_col2","_col5"]
-                              <-Map 12 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_22]
                                   PartitionCols:_col0
                                   Select Operator [SEL_17] (rows=500/500 width=178)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_147] (rows=500/500 width=178)
                                       predicate:key is not null
-                                      TableScan [TS_15] (rows=500/500 width=178)
-                                        default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                       Please refer to the previous TableScan [TS_15]
                               <-Reducer 4 [SIMPLE_EDGE]
                                 SHUFFLE [RS_21]
                                   PartitionCols:_col2
@@ -1666,8 +1659,7 @@ Stage-5
                                         Output:["_col0","_col1"]
                                         Filter Operator [FIL_146] (rows=25/25 width=175)
                                           predicate:(key is not null and value is not null)
-                                          TableScan [TS_12] (rows=25/25 width=175)
-                                            default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                           Please refer to the previous TableScan [TS_12]
                                   <-Reducer 3 [SIMPLE_EDGE]
                                     SHUFFLE [RS_18]
                                       PartitionCols:_col1

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index 087f916..e5c8d6c 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -827,7 +827,7 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Map 2 <- Map 1 (CUSTOM_EDGE)
+Map 2 <- Map 1 (BROADCAST_EDGE)
 
 Stage-0
   Fetch Operator
@@ -838,9 +838,9 @@ Stage-0
         Select Operator [SEL_9] (rows=550/480 width=18)
           Output:["_col0","_col1","_col2"]
           Map Join Operator [MAPJOIN_25] (rows=550/480 width=18)
-            BucketMapJoin:true,Conds:RS_6._col0=SEL_5._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
-          <-Map 1 [CUSTOM_EDGE]
-            MULTICAST [RS_6]
+            Conds:RS_6._col0=SEL_5._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
+          <-Map 1 [BROADCAST_EDGE]
+            BROADCAST [RS_6]
               PartitionCols:_col0
               Select Operator [SEL_2] (rows=242/242 width=18)
                 Output:["_col0","_col1"]

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index adcff44..65c9114 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -655,7 +655,7 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Map 2 <- Map 1 (CUSTOM_EDGE)
+Map 2 <- Map 1 (BROADCAST_EDGE)
 
 Stage-0
   Fetch Operator
@@ -666,9 +666,9 @@ Stage-0
         Select Operator [SEL_33] (rows=550 width=18)
           Output:["_col0","_col1","_col2"]
           Map Join Operator [MAPJOIN_32] (rows=550 width=18)
-            BucketMapJoin:true,Conds:RS_29._col0=SEL_31._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
-          <-Map 1 [CUSTOM_EDGE] vectorized
-            MULTICAST [RS_29]
+            Conds:RS_29._col0=SEL_31._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
+          <-Map 1 [BROADCAST_EDGE] vectorized
+            BROADCAST [RS_29]
               PartitionCols:_col0
               Select Operator [SEL_28] (rows=242 width=18)
                 Output:["_col0","_col1"]


[29/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan) (addendum)

Posted by we...@apache.org.
HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan) (addendum)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/877bbf0e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/877bbf0e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/877bbf0e

Branch: refs/heads/hive-14535
Commit: 877bbf0e5458f7b8a872511faa222e6992167e86
Parents: 59f6577
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Sat May 13 08:15:27 2017 +0100
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Sat May 13 08:15:27 2017 +0100

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java   | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/877bbf0e/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
index d04fc64..5964fd4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedScanOptimizer.java
@@ -322,7 +322,7 @@ public class SharedScanOptimizer extends Transform {
     // work when the TS operators are merged. This is due to an assumption in
     // MergeJoinProc that needs to be further explored.
     // If any of these conditions are not met, we cannot merge.
-    // TODO: Extend rule so it can be apply for these cases.
+    // TODO: Extend rule so it can be applied for these cases.
     final Set<Operator<?>> workOps1 = findWorkOperators(optimizerCache, prevTsOp);
     final Set<Operator<?>> workOps2 = findWorkOperators(optimizerCache, tsOp);
     boolean foundDummyStoreOp = false;
@@ -450,14 +450,14 @@ public class SharedScanOptimizer extends Transform {
             set.addAll(findWorkOperators(optimizerCache, child));
           }
         }
-        // Semijoin DPP work is considered an child because work needs
+        // Semijoin DPP work is considered a child because work needs
         // to finish for it to execute
         SemiJoinBranchInfo sjbi = pctx.getRsToSemiJoinBranchInfo().get(op);
         if (sjbi != null) {
           set.addAll(findWorkOperators(optimizerCache, sjbi.getTsOp()));
         }
       } else if(op.getConf() instanceof DynamicPruningEventDesc) {
-        // DPP work is considered an child because work needs
+        // DPP work is considered a child because work needs
         // to finish for it to execute
         set.addAll(findWorkOperators(
                 optimizerCache, ((DynamicPruningEventDesc) op.getConf()).getTableScan()));
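
The comment touched up in this hunk describes how SharedScanOptimizer collects the operators belonging to a unit of work: when an operator feeds a semijoin or dynamic partition pruning (DPP) branch, the target table scan's work is followed as if it were a child, because that work has to finish before the pruned scan can execute. Below is a minimal, self-contained sketch of that traversal idea; the class and field names are hypothetical and only illustrate the concept, they are not Hive's API.

    // Hypothetical model of the traversal described in the comments above:
    // plan children are followed normally, and a DPP/semijoin edge is treated
    // as one more child of the operator that produces the pruning values.
    import java.util.*;

    public class DppChildSketch {
      // operator id -> ids of its plan children (illustrative graph only)
      static Map<String, List<String>> children = new HashMap<>();
      // operator id -> table scan its DPP/semijoin branch feeds (illustrative)
      static Map<String, String> dppEdges = new HashMap<>();

      static Set<String> findWorkOperators(String start) {
        Set<String> visited = new LinkedHashSet<>();
        Deque<String> stack = new ArrayDeque<>();
        stack.push(start);
        while (!stack.isEmpty()) {
          String op = stack.pop();
          if (!visited.add(op)) {
            continue;
          }
          for (String child : children.getOrDefault(op, List.of())) {
            stack.push(child);
          }
          // DPP work is considered a child because it must finish before
          // the scan it prunes can run
          String dppTarget = dppEdges.get(op);
          if (dppTarget != null) {
            stack.push(dppTarget);
          }
        }
        return visited;
      }

      public static void main(String[] args) {
        children.put("TS_0", List.of("FIL_1"));
        children.put("FIL_1", List.of("RS_2"));
        dppEdges.put("RS_2", "TS_3");   // RS_2 feeds pruning values to TS_3
        children.put("TS_3", List.of("SEL_4"));
        System.out.println(findWorkOperators("TS_0"));
        // prints [TS_0, FIL_1, RS_2, TS_3, SEL_4]
      }
    }
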


[23/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_notin.q.out b/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
index d89361d..6a8a3f9 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
@@ -25,9 +25,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -62,13 +62,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key > '2') (type: boolean)
                     Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
@@ -138,7 +131,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -324,12 +317,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (SIMPLE_EDGE)
+        Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -347,13 +340,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: string)
                       Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string), _col2 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (p_mfgr = p_mfgr) (type: boolean)
                     Statistics: Num rows: 13 Data size: 2899 Basic stats: COMPLETE Column stats: COMPLETE
@@ -364,13 +350,6 @@ STAGE PLANS:
                       Statistics: Num rows: 13 Data size: 2899 Basic stats: COMPLETE Column stats: COMPLETE
                       TopN Hash Memory Usage: 0.1
                       value expressions: p_name (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (p_mfgr = p_mfgr) (type: boolean)
                     Statistics: Num rows: 13 Data size: 2899 Basic stats: COMPLETE Column stats: COMPLETE
@@ -425,7 +404,7 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -472,7 +451,7 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -487,7 +466,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -532,7 +511,7 @@ STAGE PLANS:
                           sort order: ++
                           Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                           Statistics: Num rows: 2 Data size: 438 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 9 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -627,11 +606,11 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -660,13 +639,6 @@ STAGE PLANS:
                     Map-reduce partition columns: p_mfgr (type: string)
                     Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.1
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: p_mfgr (type: string), p_size (type: int)
                     sort order: ++
@@ -778,7 +750,7 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -822,7 +794,7 @@ STAGE PLANS:
                           sort order: 
                           Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -919,15 +891,15 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
-        Reducer 12 <- Map 11 (SIMPLE_EDGE)
-        Reducer 13 <- Reducer 12 (SIMPLE_EDGE)
+        Reducer 10 <- Map 5 (SIMPLE_EDGE)
+        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 13 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 5 (SIMPLE_EDGE)
+        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -947,7 +919,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col2 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 11 
+        Map 5 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -961,13 +933,6 @@ STAGE PLANS:
                       Map-reduce partition columns: p_mfgr (type: string)
                       Statistics: Num rows: 13 Data size: 1326 Basic stats: COMPLETE Column stats: COMPLETE
                       TopN Hash Memory Usage: 0.1
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (p_mfgr = p_mfgr) (type: boolean)
                     Statistics: Num rows: 13 Data size: 1326 Basic stats: COMPLETE Column stats: COMPLETE
@@ -977,13 +942,6 @@ STAGE PLANS:
                       Map-reduce partition columns: p_mfgr (type: string)
                       Statistics: Num rows: 13 Data size: 1326 Basic stats: COMPLETE Column stats: COMPLETE
                       TopN Hash Memory Usage: 0.1
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (p_mfgr = p_mfgr) (type: boolean)
                     Statistics: Num rows: 13 Data size: 1326 Basic stats: COMPLETE Column stats: COMPLETE
@@ -998,27 +956,6 @@ STAGE PLANS:
         Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
-              Group By Operator
-                aggregations: min(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: count(), count(_col1)
-                  keys: _col0 (type: string)
-                  mode: complete
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 12 
-            Execution mode: llap
-            Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: int)
                 outputColumnNames: _col2, _col5
@@ -1063,7 +1000,7 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: int)
-        Reducer 13 
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1220,7 +1157,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1267,6 +1204,27 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: int)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: count(), count(_col1)
+                  keys: _col0 (type: string)
+                  mode: complete
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: bigint), _col2 (type: bigint)
 
   Stage: Stage-0
     Fetch Operator
@@ -1525,10 +1483,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1549,15 +1507,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  properties:
-                    insideView TRUE
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key < '11') (type: boolean)
                     Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1574,15 +1523,6 @@ STAGE PLANS:
                           sort order: 
                           Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  properties:
-                    insideView TRUE
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((key < '11') and CASE WHEN ((key > '104')) THEN (null) ELSE ((key < '11')) END) (type: boolean)
                     Statistics: Num rows: 83 Data size: 7221 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1644,7 +1584,7 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1656,7 +1596,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1710,9 +1650,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1759,13 +1699,6 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1842,7 +1775,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1920,11 +1853,11 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1963,13 +1896,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_type (type: string), p_size (type: int)
                     outputColumnNames: p_type, p_size
@@ -2064,7 +1990,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2087,7 +2013,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2152,11 +2078,11 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2195,13 +2121,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_partkey (type: int), p_type (type: string)
                     outputColumnNames: p_partkey, p_type
@@ -2296,7 +2215,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2319,7 +2238,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2391,13 +2310,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 11 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 10 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2417,23 +2336,6 @@ STAGE PLANS:
                       value expressions: _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: e
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: p_size (type: int)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 5 
             Map Operator Tree:
                 TableScan
@@ -2454,13 +2356,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1781 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3250 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 3250 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2476,7 +2371,24 @@ STAGE PLANS:
                         Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Map 9 
+            Map Operator Tree:
+                TableScan
+                  alias: e
+                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: p_size (type: int)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: int)
+                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2562,7 +2474,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 13 Data size: 1781 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2580,7 +2492,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 13 Data size: 1677 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: string), _col2 (type: boolean)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -2628,11 +2540,11 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2671,13 +2583,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: double)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2912 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_type (type: string), p_retailprice (type: double)
                     outputColumnNames: p_type, p_retailprice
@@ -2772,7 +2677,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2795,7 +2700,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: bigint)
                       Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2843,14 +2748,14 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (SIMPLE_EDGE)
+        Reducer 11 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2870,24 +2775,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col6 (type: string), _col7 (type: double), _col8 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: p_size (type: int)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
+        Map 4 
             Map Operator Tree:
                 TableScan
                   alias: p
@@ -2902,9 +2790,19 @@ STAGE PLANS:
                       Map-reduce partition columns: _col2 (type: int), _col0 (type: int)
                       Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
+                  Select Operator
+                    expressions: p_partkey (type: int), p_name (type: string), p_size (type: int)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col2 (type: int), _col0 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col2 (type: int), _col0 (type: int)
+                      Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 9 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -2919,68 +2817,32 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: p_partkey (type: int), p_name (type: string), p_size (type: int)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: p_size (type: int)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
-                      key expressions: _col2 (type: int), _col0 (type: int)
-                      sort order: ++
-                      Map-reduce partition columns: _col2 (type: int), _col0 (type: int)
-                      Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col1 (type: string)
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: int)
+                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 _col2 (type: int), _col0 (type: int)
-                  1 _col0 (type: int), (_col0 + 121150) (type: int)
-                outputColumnNames: _col1, _col3
-                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  keys: _col1 (type: string), _col3 (type: int)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: int)
-                    sort order: ++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                    Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
               Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: int)
+                keys: KEY._col0 (type: int)
                 mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: _col0 is not null (type: boolean)
-                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 1 Data size: 129 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: int)
-                      sort order: ++
-                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 1 Data size: 129 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col2 (type: boolean)
-        Reducer 13 
+                outputColumnNames: _col0
+                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), (_col0 + 121150) (type: int)
+                  sort order: ++
+                  Map-reduce partition columns: _col0 (type: int), (_col0 + 121150) (type: int)
+                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3073,19 +2935,48 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 7 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col2 (type: int), _col0 (type: int)
+                  1 _col0 (type: int), (_col0 + 121150) (type: int)
+                outputColumnNames: _col1, _col3
+                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  keys: _col1 (type: string), _col3 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: int)
+                    sort order: ++
+                    Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
+                    Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
-                keys: KEY._col0 (type: int)
+                keys: KEY._col0 (type: string), KEY._col1 (type: int)
                 mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int), (_col0 + 121150) (type: int)
-                  sort order: ++
-                  Map-reduce partition columns: _col0 (type: int), (_col0 + 121150) (type: int)
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
+                Filter Operator
+                  predicate: _col0 is not null (type: boolean)
+                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 1 Data size: 129 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
+                      Statistics: Num rows: 1 Data size: 129 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col2 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -3139,9 +3030,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3181,13 +3072,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                         Statistics: Num rows: 3 Data size: 72 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint), _col3 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: p
-                  Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((p_size = p_size) and (p_partkey = p_partkey)) (type: boolean)
                     Statistics: Num rows: 6 Data size: 774 Basic stats: COMPLETE Column stats: COMPLETE
@@ -3260,7 +3144,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                   Statistics: Num rows: 3 Data size: 72 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col2 (type: bigint), _col3 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3314,9 +3198,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3360,13 +3244,6 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 12 Data size: 1296 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_brand is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
@@ -3443,7 +3320,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 12 Data size: 1296 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3492,14 +3369,14 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (SIMPLE_EDGE)
+        Reducer 11 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 4 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3519,27 +3396,6 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: (p_size + 1) (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 4 
             Map Operator Tree:
                 TableScan
@@ -3555,9 +3411,19 @@ STAGE PLANS:
                       Map-reduce partition columns: (_col1 + 1) (type: int)
                       Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
+                  Select Operator
+                    expressions: p_type (type: string), p_size (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: (_col1 + 1) (type: int)
+                      sort order: +
+                      Map-reduce partition columns: (_col1 + 1) (type: int)
+                      Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 9 
             Map Operator Tree:
                 TableScan
                   alias: part
@@ -3576,68 +3442,36 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: p_type (type: string), p_size (type: int)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: (_col1 + 1) (type: int)
-                      sort order: +
-                      Map-reduce partition columns: (_col1 + 1) (type: int)
-                      Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: string)
+                    expressions: (p_size + 1) (type: int)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      keys: _col0 (type: int)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 (_col1 + 1) (type: int)
-                  1 _col0 (type: int)
-                outputColumnNames: _col0, _col2
-                Statistics: Num rows: 18 Data size: 1944 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  keys: _col0 (type: string), _col2 (type: int)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: int)
-                    sort order: ++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                    Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
               Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: int)
+                keys: KEY._col0 (type: int)
                 mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: _col0 is not null (type: boolean)
-                  Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 9 Data size: 1008 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: int)
-                      sort order: ++
-                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 9 Data size: 1008 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col2 (type: boolean)
-        Reducer 13 
+                outputColumnNames: _col0
+                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: int)
+                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3730,19 +3564,48 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 9 Data size: 180 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 7 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 (_col1 + 1) (type: int)
+                  1 _col0 (type: int)
+                outputColumnNames: _col0, _col2
+                Statistics: Num rows: 18 Data size: 1944 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  keys: _col0 (type: string), _col2 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: int)
+                    sort order: ++
+                    Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
+                    Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
-                keys: KEY._col0 (type: int)
+                keys: KEY._col0 (type: string), KEY._col1 (type: int)
                 mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
+                Filter Operator
+                  predicate: _col0 is not null (type: boolean)
+                  Statistics: Num rows: 9 Data size: 972 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 9 Data size: 1008 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
+                      Statistics: Num rows: 9 Data size: 1008 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col2 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -3798,13 +3661,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 5 <- Reducer 11 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 6 (SIMPLE_EDGE)
+        Reducer 9 <- Map 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3822,30 +3685,6 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (key = '90') (type: boolean)
-                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                      Group By Operator
-                        aggregations: count()
-                        keys: '90' (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                          value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 6 
             Map Operator Tree:
                 TableScan
@@ -3864,13 +3703,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     keys: key (type: string)
                     mode: hash
@@ -3879,40 +3711,27 @@ STAGE PLANS:
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: bigint)
-                  outputColumnNames: _col1
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
-                    predicate: _col1 is not null (type: boolean)
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    predicate: (key = '90') (type: boolean)
+                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
-                      expressions: _col1 (type: bigint)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
-                        keys: _col0 (type: bigint)
+                        aggregations: count()
+                        keys: '90' (type: string)
                         mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
-                          key expressions: _col0 (type: bigint)
+                          key expressions: _col0 (type: string)
                           sort order: +
-                          Map-reduce partition columns: _col0 (type: bigint)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4008,7 +3827,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4026,6 +3845,36 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 205 Data size: 18655 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: boolean)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: _col1 is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: bigint)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        keys: _col0 (type: bigint)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: bigint)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: bigint)
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 
   Stage: Stage-0
     Fetch Operator
@@ -4086,12 +3935,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 5 <- Reducer 11 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (SIMPLE_EDGE)
+        Reducer 7 <- Map 1 (SIMPLE_EDGE)
         Reducer 9 <- Map 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -4110,37 +3959,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (key = '90') (type: boolean)
-                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                      Group By Operator
-                        aggregations: count()
-                        keys: '90' (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                          value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: sc
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4160,13 +3978,6 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 205 Data size: 21115 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: sc
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4186,36 +3997,30 @@ STAGE PLANS:
                           Statistics: Num rows: 250 Data size: 67750 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: bigint)
-                  outputColumnNames: _col1
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+        Map 8 
+            Map Operator Tree:
+                TableScan
+                  alias: s1
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
-                    predicate: _col1 is not null (type: boolean)
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    predicate: (key = '90') (type: boolean)
+                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
-                      expressions: _col1 (type: bigint)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
-                        keys: _col0 (type: bigint)
+                        aggregations: count()
+                        keys: '90' (type: string)
                         mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
                         Reduce Output Operator
-                          key expressions: _col0 (type: bigint)
+                          key expressions: _col0 (type: string)
                           sort order: +
-                          Map-reduce partition columns: _col0 (type: bigint)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4299,7 +4104,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4314,7 +4119,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 205 Data size: 21115 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
-        Reducer 9 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4339,6 +4144,36 @@ STAGE PLANS:
                         Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
                         Statistics: Num rows: 250 Data size: 68750 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: boolean)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: _col1 is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: bigint)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        keys: _col0 (type: bigint)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: bigint)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: bigint)
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 
   Stage: Stage-0
     Fetch Operator
@@ -4399,13 +4234,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 7 <- Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 5 (SIMPLE_EDGE)
+        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4444,13 +4279,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_type (type: string), p_size (type: int)
                     outputColumnNames: p_type, p_size
@@ -4469,24 +4297,6 @@ STAGE PLANS:
                         value expressions: _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: int), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: int)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: int)
-                    Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4575,7 +4385,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4598,6 +4408,24 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: int), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -4644,13 +4472,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 7 <- Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 5 (SIMPLE_EDGE)
+        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4689,13 +4517,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_type (type: string), p_size (type: int)
                     outputColumnNames: p_type, p_size
@@ -4710,28 +4531,10 @@ STAGE PLANS:
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: int), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: int)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: int)
-                    Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: boolean)
+                        Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col1 (type: int)
+            Execution mode: llap
+            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4824,7 +4627,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4847,6 +4650,24 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: int)
                       Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: int), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: int)
+                    Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -5595,13 +5416,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 11 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 10 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5621,23 +5442,6 @@ STAGE PLANS:
                       value expressions: _col1 (type: int)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: e
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: p_size (type: int)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 5 
             Map Operator Tree:
                 TableScan
@@ -5658,13 +5462,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 12 Data size: 1296 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_brand is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
@@ -5680,7 +5477,24 @@ STAGE PLANS:
                         Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Map 9 
+            Map Operator Tree:
+                TableScan
+                  alias: e
+                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: p_size (type: int)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key

<TRUNCATED>

[47/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan) (addendum II)

Posted by we...@apache.org.
HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan) (addendum II)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c19981c6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c19981c6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c19981c6

Branch: refs/heads/hive-14535
Commit: c19981c66070b51df8c9129da69407c8be586d9d
Parents: 360a91e
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Tue May 16 13:37:45 2017 +0100
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Tue May 16 13:38:07 2017 +0100

----------------------------------------------------------------------
 .../test/results/clientpositive/tez/explainanalyze_3.q.out   | 8 ++++----
 ql/src/test/results/clientpositive/tez/explainuser_3.q.out   | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/c19981c6/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index e5c8d6c..087f916 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -827,7 +827,7 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Map 2 <- Map 1 (BROADCAST_EDGE)
+Map 2 <- Map 1 (CUSTOM_EDGE)
 
 Stage-0
   Fetch Operator
@@ -838,9 +838,9 @@ Stage-0
         Select Operator [SEL_9] (rows=550/480 width=18)
           Output:["_col0","_col1","_col2"]
           Map Join Operator [MAPJOIN_25] (rows=550/480 width=18)
-            Conds:RS_6._col0=SEL_5._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
-          <-Map 1 [BROADCAST_EDGE]
-            BROADCAST [RS_6]
+            BucketMapJoin:true,Conds:RS_6._col0=SEL_5._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
+          <-Map 1 [CUSTOM_EDGE]
+            MULTICAST [RS_6]
               PartitionCols:_col0
               Select Operator [SEL_2] (rows=242/242 width=18)
                 Output:["_col0","_col1"]

http://git-wip-us.apache.org/repos/asf/hive/blob/c19981c6/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 65c9114..adcff44 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -655,7 +655,7 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Map 2 <- Map 1 (BROADCAST_EDGE)
+Map 2 <- Map 1 (CUSTOM_EDGE)
 
 Stage-0
   Fetch Operator
@@ -666,9 +666,9 @@ Stage-0
         Select Operator [SEL_33] (rows=550 width=18)
           Output:["_col0","_col1","_col2"]
           Map Join Operator [MAPJOIN_32] (rows=550 width=18)
-            Conds:RS_29._col0=SEL_31._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
-          <-Map 1 [BROADCAST_EDGE] vectorized
-            BROADCAST [RS_29]
+            BucketMapJoin:true,Conds:RS_29._col0=SEL_31._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"]
+          <-Map 1 [CUSTOM_EDGE] vectorized
+            MULTICAST [RS_29]
               PartitionCols:_col0
               Select Operator [SEL_28] (rows=242 width=18)
                 Output:["_col0","_col1"]
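
Editor's note: the two golden-file hunks above only flip the join edge from a broadcast
map join to a bucket map join (CUSTOM_EDGE with MULTICAST and BucketMapJoin:true).
As a hedged illustration only, with hypothetical table names that are not the ones used
by explainanalyze_3.q/explainuser_3.q, this is the kind of setup where Hive on Tez can
pick a bucket map join instead of broadcasting the whole small side:

    -- assumption: both sides bucketed on the join key with the same bucket count
    SET hive.convert.join.bucket.mapjoin.tez=true;
    CREATE TABLE small_b (key INT, value STRING)
      CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC;
    CREATE TABLE big_b (key INT, value STRING)
      CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC;
    EXPLAIN
    SELECT b.key, b.value, s.value
    FROM big_b b JOIN small_b s ON b.key = s.key;

With the broadcast plan every hash-table loader receives the entire small table; with the
bucket map join only the matching bucket is multicast to each task, which is what the
CUSTOM_EDGE / MULTICAST lines in the updated plans express. Whether these particular
tests toggle the setting shown is not visible in this diff.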


[38/50] [abbrv] hive git commit: Revert "HIVE-16501 : Add rej/orig to .gitignore"

Posted by we...@apache.org.
Revert "HIVE-16501 : Add rej/orig to .gitignore"

Note: removal of *.orig files has been kept
This reverts commit c911f42035d9e7b91191e2683f85d8fd2a35eb27.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/413245ed
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/413245ed
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/413245ed

Branch: refs/heads/hive-14535
Commit: 413245ed90f09ab446711ac37f32591e3ac68462
Parents: 0efb936
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Mon May 15 20:31:15 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Mon May 15 20:33:15 2017 +0200

----------------------------------------------------------------------
 .gitignore | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/413245ed/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 8578a64..47c59da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,5 +29,3 @@ hcatalog/webhcat/svr/target
 conf/hive-default.xml.template
 itests/hive-blobstore/src/test/resources/blobstore-conf.xml
 .DS_Store
-*.rej
-*.orig


[24/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
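
Editor's note: every hunk below follows the same pattern. Duplicate Map vertices that
scanned the same underlying table (aliases such as sc, s1, pp, part_null) disappear, and
the reducers that consumed them are re-pointed at one surviving Map vertex, so the table
is read once and its output is shared between plan branches. As a rough, hypothetical
illustration (table and column names are invented, not taken from these tests):

    -- the same table appears in the outer query and in the subquery,
    -- so without shared scans it would be scanned twice
    EXPLAIN
    SELECT p.key, p.value
    FROM src_table p
    WHERE p.key IN (SELECT s.key FROM src_table s WHERE s.value IS NOT NULL);

Before this change the plan carried one Map vertex per occurrence of src_table; with
shared scans a single Map vertex feeds both branches, which is why the edge lists in
these golden files renumber and shrink.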
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_in.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_in.q.out b/ql/src/test/results/clientpositive/llap/subquery_in.q.out
index 58e78c4..1e69d86 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_in.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_in.q.out
@@ -2598,11 +2598,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
+        Reducer 7 <- Map 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2619,13 +2619,6 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: sc
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2641,7 +2634,7 @@ STAGE PLANS:
                         Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: s1
@@ -2724,7 +2717,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2741,7 +2734,7 @@ STAGE PLANS:
                     sort order: ++
                     Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
                     Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3991,9 +3984,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4031,13 +4024,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4117,7 +4103,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4465,12 +4451,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
+        Reducer 8 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4508,13 +4494,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1352 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4530,13 +4509,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2704 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4554,32 +4526,6 @@ STAGE PLANS:
                         value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: bigint), _col0 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: _col0 is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: _col0 (type: bigint), _col1 (type: string), true (type: boolean)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: string), _col0 (type: bigint)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: string), _col0 (type: bigint)
-                        Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4666,7 +4612,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4687,6 +4633,32 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1560 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint), _col0 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: _col0 is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col0 (type: bigint), _col1 (type: string), true (type: boolean)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col0 (type: bigint)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: string), _col0 (type: bigint)
+                        Statistics: Num rows: 13 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -4738,12 +4710,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4803,13 +4775,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 13 Data size: 1040 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_partkey is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4827,32 +4792,6 @@ STAGE PLANS:
                         value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: double), _col0 (type: int)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: _col0 is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: _col0 (type: double), _col1 (type: int), true (type: boolean)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: int), _col0 (type: double)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: int), _col0 (type: double)
-                        Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -4960,6 +4899,32 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: int)
                     Statistics: Num rows: 13 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 156 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double), _col0 (type: int)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: _col0 is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col0 (type: double), _col1 (type: int), true (type: boolean)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: int), _col0 (type: double)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: int), _col0 (type: double)
+                        Statistics: Num rows: 13 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
index 95c78f5..3920ac6 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
@@ -261,12 +261,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 8 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -307,13 +307,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part_null
-                  Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: p_name (type: string)
                     outputColumnNames: p_name
@@ -327,13 +320,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part_null
-                  Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     keys: p_name (type: string)
                     mode: hash
@@ -346,24 +332,6 @@ STAGE PLANS:
                       Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -434,7 +402,7 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -446,6 +414,24 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -476,12 +462,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -542,13 +528,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part_null
-                  Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     keys: p_type (type: string)
                     mode: hash
@@ -561,24 +540,6 @@ STAGE PLANS:
                       Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -661,6 +622,24 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -717,12 +696,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -783,13 +762,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part_null
-                  Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
                     Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
@@ -805,24 +777,6 @@ STAGE PLANS:
                         Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -905,6 +859,24 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -936,10 +908,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Map 9 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 4 <- Map 8 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -978,13 +950,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: tempty
-                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Group By Operator
                     keys: c (type: char(2))
                     mode: hash
@@ -997,7 +962,7 @@ STAGE PLANS:
                       Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: part_null
@@ -1092,7 +1057,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1698,15 +1663,15 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-        Reducer 14 <- Reducer 13 (SIMPLE_EDGE)
-        Reducer 16 <- Map 15 (SIMPLE_EDGE)
+        Reducer 10 <- Map 7 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+        Reducer 13 <- Map 12 (SIMPLE_EDGE)
+        Reducer 14 <- Map 12 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+        Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
         Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -1727,7 +1692,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col2 (type: string), _col3 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
+        Map 12 
             Map Operator Tree:
                 TableScan
                   alias: pp
@@ -1745,33 +1710,6 @@ STAGE PLANS:
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: p_type is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: p_brand (type: string), p_type (type: string), p_container (type: string)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: string), _col0 (type: string)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
-                        Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 15 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1825,26 +1763,22 @@ STAGE PLANS:
                         Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
                         Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: string)
+                  Filter Operator
+                    predicate: p_type is not null (type: boolean)
+                    Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: p_brand (type: string), p_type (type: string), p_container (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col0 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
+                        Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: string), _col0 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col1 (type: string), _col0 (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 13 
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1865,7 +1799,7 @@ STAGE PLANS:
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 14 
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1886,7 +1820,24 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col2 (type: boolean)
-        Reducer 16 
+        Reducer 13 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: string), _col0 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col1 (type: string), _col0 (type: string)
+                    sort order: ++
+                    Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
+                    Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 14 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2250,15 +2201,15 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
-        Reducer 13 <- Map 12 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-        Reducer 14 <- Reducer 13 (SIMPLE_EDGE)
-        Reducer 16 <- Map 15 (SIMPLE_EDGE)
+        Reducer 10 <- Map 7 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+        Reducer 13 <- Map 12 (SIMPLE_EDGE)
+        Reducer 14 <- Map 12 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+        Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
         Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -2279,7 +2230,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col2 (type: string), _col3 (type: string), _col5 (type: int), _col7 (type: double), _col8 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
+        Map 12 
             Map Operator Tree:
                 TableScan
                   alias: pp
@@ -2297,33 +2248,6 @@ STAGE PLANS:
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 12 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: p_type is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: p_brand (type: string), p_type (type: string), p_container (type: string)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: string), _col0 (type: string)
-                        sort order: ++
-                        Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
-                        Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col2 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 15 
-            Map Operator Tree:
-                TableScan
-                  alias: pp
-                  Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_type is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 5096 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2377,26 +2301,22 @@ STAGE PLANS:
                         Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
                         Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: string)
+                  Filter Operator
+                    predicate: p_type is not null (type: boolean)
+                    Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: p_brand (type: string), p_type (type: string), p_container (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col0 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
+                        Statistics: Num rows: 26 Data size: 7488 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col1 (type: string), _col0 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col1 (type: string), _col0 (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 13 
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -2417,7 +2337,7 @@ STAGE PLANS:
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 14 
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2442,7 +2362,24 @@ STAGE PLANS:
                         Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
                         Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: boolean)
-        Reducer 16 
+        Reducer 13 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: string), _col0 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col1 (type: string), _col0 (type: string)
+                    sort order: ++
+                    Map-reduce partition columns: _col1 (type: string), _col0 (type: string)
+                    Statistics: Num rows: 13 Data size: 2548 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 14 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2629,12 +2566,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
         Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2694,13 +2631,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int)
                         Statistics: Num rows: 13 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint), _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: p_size is not null (type: boolean)
                     Statistics: Num rows: 26 Data size: 2808 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2716,27 +2646,6 @@ STAGE PLANS:
                         Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: _col0 is not null (type: boolean)
-                  Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: int)
-                      sort order: ++
-                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col2 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -2824,6 +2733,27 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 13 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint), _col2 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string), KEY._col1 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                Filter Operator
+                  predicate: _col0 is not null (type: boolean)
+                  Statistics: Num rows: 13 Data size: 1404 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: int), true (type: boolean)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
+                      Statistics: Num rows: 13 Data size: 1456 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col2 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -3066,12 +2996,12 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE)
-        Reducer 12 <- Map 11 (CUSTOM_SIMPLE_EDGE)
+        Reducer 11 <- Map 9 (CUSTOM_SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
         Reducer 3 <- Map 5 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
         Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE), Reducer 10 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Reducer 12 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 11 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3094,26 +3024,6 @@ STAGE PLANS:
                         Statistics: Num rows: 50 Data size: 200 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: lineitem
-                  Statistics: Num rows: 100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: l_quantity (type: double)
-                    outputColumnNames: l_quantity
-                    Statistics: Num rows: 100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: avg(l_quantity)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: struct<count:bigint,sum:double,input:double>)
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 5 
             Map Operator Tree:
                 TableScan
@@ -3170,6 +3080,19 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: struct<count:bigint,sum:double,input:double>)
+                  Select Operator
+                    expressions: l_quantity (type: double)
+                    outputColumnNames: l_quantity
+                    Statistics: Num rows: 100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: avg(l_quantity)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: struct<count:bigint,sum:double,input:double>)
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 10 
@@ -3189,7 +3112,7 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 12 
+        Reducer 11 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3351,13 +3274,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+        Reducer 9 <- Map 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3374,33 +3297,6 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: s2
-                  Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: value is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: value (type: string)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: value is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -3414,13 +3310,6 @@ STAGE PLANS:
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key > '9') (type: boolean)
                     Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
@@ -3436,19 +3325,26 @@ STAGE PLANS:
                         value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Map 8 
+            Map Operator Tree:
+                TableScan
+                  alias: s2
+                  Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: value is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      keys: value (type: string)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
+            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -3508,7 +3404,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3521,7 +3417,7 @@ STAGE PLANS:
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                   Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -3544,7 +3440,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 69 Data size: 6555 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 9 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3574,6 +3470,19 @@ STAGE PLANS:
                           sort order: +
                           Map-reduce partition columns: _col0 (type: bigint)
                           Statistics: Num rows: 34 Data size: 272 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 214 Data size: 19474 Basic stats: COMPLETE Column stats: COMPLETE
 
   Stage: Stage-0
     Fetch Operator
@@ -4083,9 +3992,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4121,13 +4030,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: tnull
-                  Statistics: Num rows: 3 Data size: 14 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     keys: i (type: int)
                     mode: hash
@@ -4194,7 +4096,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4251,13 +4153,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
+        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 9 <- Map 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 6 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4275,25 +4177,6 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: true (type: boolean)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: boolean)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: boolean)
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 6 
             Map Operator Tree:
                 TableScan
@@ -4313,13 +4196,6 @@ STAGE PLANS:
                           sort order: 
                           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key > '9') (type: boolean)
                     Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
@@ -4335,7 +4211,26 @@ STAGE PLANS:
                         Statistics: Num rows: 69 Data size: 6003 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Map 9 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      keys: true (type: boolean)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: boolean)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: boolean)
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -4431,7 +4326,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator


[48/50] [abbrv] hive git commit: HIVE-16647: Improve the validation output to make the output to stderr and stdout more consistent (Aihua Xu, reviewed by Yongzhi Chen)

Posted by we...@apache.org.
HIVE-16647: Improve the validation output to make the output to stderr and stdout more consistent (Aihua Xu, reviewed by Yongzhi Chen)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2bcbd29e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2bcbd29e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2bcbd29e

Branch: refs/heads/hive-14535
Commit: 2bcbd29e8d2fbc740c762997663cc40853892add
Parents: c19981c
Author: Aihua Xu <ai...@apache.org>
Authored: Thu May 11 10:11:29 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Tue May 16 09:14:49 2017 -0400

----------------------------------------------------------------------
 .../org/apache/hive/beeline/HiveSchemaTool.java | 46 ++++++++++----------
 1 file changed, 22 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/2bcbd29e/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 7ab927a..86faeb0 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -191,7 +191,7 @@ public class HiveSchemaTool {
     try(Statement stmt = metastoreConn.createStatement();
         ResultSet res = stmt.executeQuery(versionQuery)) {
       if (!res.next()) {
-        throw new HiveMetaException("Could not find version info in metastore VERSION table");
+        throw new HiveMetaException("Could not find version info in metastore VERSION table.");
       }
       String currentSchemaVersion = res.getString(1);
       if (checkDuplicatedVersion && res.next()) {
@@ -244,7 +244,6 @@ public class HiveSchemaTool {
     }
     if (numOfInvalid > 0) {
       isValid = false;
-      System.err.println("Total number of invalid DB locations is: "+ numOfInvalid);
     }
     return isValid;
   }
@@ -305,7 +304,6 @@ public class HiveSchemaTool {
     }
     if (numOfInvalid > 0) {
       isValid = false;
-      System.err.println("Total number of invalid TABLE locations is: "+ numOfInvalid);
     }
     return isValid;
   }
@@ -367,7 +365,6 @@ public class HiveSchemaTool {
     }
     if (numOfInvalid > 0) {
       isValid = false;
-      System.err.println("Total number of invalid PARTITION locations is: "+ numOfInvalid);
     }
     return isValid;
   }
@@ -384,11 +381,11 @@ public class HiveSchemaTool {
     }
 
     if (getDbCommandParser(dbType).needsQuotedIdentifier()) {
-      skewedColLoc = "select t.\"TBL_NAME\", t.\"TBL_ID\", sk.\"STRING_LIST_ID_KID\", sk.\"LOCATION\" from \"TBLS\" t, \"SDS\" s, \"SKEWED_COL_VALUE_LOC_MAP\" sk "
-           + "where sk.\"SD_ID\" = s.\"SD_ID\" and s.\"SD_ID\" = t.\"SD_ID\" and sk.\"STRING_LIST_ID_KID\" >= ? and sk.\"STRING_LIST_ID_KID\" <= ? ";
+      skewedColLoc = "select t.\"TBL_NAME\", t.\"TBL_ID\", sk.\"STRING_LIST_ID_KID\", sk.\"LOCATION\", db.\"NAME\", db.\"DB_ID\" from \"TBLS\" t, \"SDS\" s, \"DBS\" db, \"SKEWED_COL_VALUE_LOC_MAP\" sk "
+           + "where sk.\"SD_ID\" = s.\"SD_ID\" and s.\"SD_ID\" = t.\"SD_ID\" and t.\"DB_ID\" = db.\"DB_ID\" and sk.\"STRING_LIST_ID_KID\" >= ? and sk.\"STRING_LIST_ID_KID\" <= ? ";
     } else {
-      skewedColLoc = "select t.TBL_NAME, t.TBL_ID, sk.STRING_LIST_ID_KID, sk.LOCATION from TBLS t, SDS s, SKEWED_COL_VALUE_LOC_MAP sk "
-           + "where sk.SD_ID = s.SD_ID and s.SD_ID = t.SD_ID and sk.STRING_LIST_ID_KID >= ? and sk.STRING_LIST_ID_KID <= ? ";
+      skewedColLoc = "select t.TBL_NAME, t.TBL_ID, sk.STRING_LIST_ID_KID, sk.LOCATION, db.NAME, db.DB_ID from TBLS t, SDS s, DBS db, SKEWED_COL_VALUE_LOC_MAP sk "
+           + "where sk.SD_ID = s.SD_ID and s.SD_ID = t.SD_ID and t.DB_ID = db.DB_ID and sk.STRING_LIST_ID_KID >= ? and sk.STRING_LIST_ID_KID <= ? ";
     }
 
     long maxID = 0, minID = 0;
@@ -410,7 +407,8 @@ public class HiveSchemaTool {
         res = pStmt.executeQuery();
         while (res.next()) {
           String locValue = res.getString(4);
-          String entity = "Table "  + getNameOrID(res,1,2) +
+          String entity = "Database " + getNameOrID(res,5,6) +
+              ", Table " + getNameOrID(res,1,2) +
               ", String list " + res.getString(3);
           if (!checkLocation(entity, locValue, defaultServers)) {
             numOfInvalid++;
@@ -425,7 +423,6 @@ public class HiveSchemaTool {
     }
     if (numOfInvalid > 0) {
       isValid = false;
-      System.err.println("Total number of invalid SKEWED_COL_VALUE_LOC_MAP locations is: "+ numOfInvalid);
     }
     return isValid;
   }
@@ -445,14 +442,14 @@ public class HiveSchemaTool {
       URI[] defaultServers) {
     boolean isValid = true;
     if (entityLocation == null) {
-      System.err.println(entity + ", error: empty location");
+      System.err.println(entity + ", Error: empty location");
       isValid = false;
     } else {
       try {
         URI currentUri = new Path(entityLocation).toUri();
         String scheme = currentUri.getScheme();
         if (StringUtils.isEmpty(scheme)) {
-          System.err.println(entity + ", location: "+ entityLocation + ", error: missing location scheme");
+          System.err.println(entity + ", Location: "+ entityLocation + ", Error: missing location scheme");
           isValid = false;
         } else if (ArrayUtils.isNotEmpty(defaultServers) && currentUri.getAuthority() != null) {
           String authority = currentUri.getAuthority();
@@ -465,12 +462,12 @@ public class HiveSchemaTool {
             }
           }
           if (!matchServer) {
-            System.err.println(entity + ", location: " + entityLocation + ", error: mismatched server");
+            System.err.println(entity + ", Location: " + entityLocation + ", Error: mismatched server");
             isValid = false;
           }
         }
       } catch (Exception pe) {
-        System.err.println(entity + ", error: invalid location " + pe.getMessage());
+        System.err.println(entity + ", Error: invalid location - " + pe.getMessage());
         isValid =false;
       }
     }
@@ -722,11 +719,12 @@ public class HiveSchemaTool {
       if (hme.getMessage().contains("Metastore schema version is not compatible")
         || hme.getMessage().contains("Multiple versions were found in metastore")
         || hme.getMessage().contains("Could not find version info in metastore VERSION table")) {
-        System.out.println("Failed in schema version validation: " + hme.getMessage());
-          return false;
-        } else {
-          throw hme;
-        }
+        System.err.println(hme.getMessage());
+        System.out.println("Failed in schema version validation.");
+        return false;
+      } else {
+        throw hme;
+      }
     }
     System.out.println("Succeeded in schema version validation.");
     return true;
@@ -745,7 +743,8 @@ public class HiveSchemaTool {
     try {
       version = getMetaStoreSchemaVersion(hmsConn);
     } catch (HiveMetaException he) {
-      System.err.println("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
+      System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage());
+      System.out.println("Failed in schema version validation.");
       LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
       return false;
     }
@@ -792,17 +791,16 @@ public class HiveSchemaTool {
       }
     } catch (Exception e) {
       System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
-      System.out.println("Schema table validation failed!!!");
+      System.out.println("Failed in schema table validation.");
       return false;
     }
 
     LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
     LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");
     // now diff the lists
-    int schemaSize = schemaTables.size();
     schemaTables.removeAll(dbTables);
     if (schemaTables.size() > 0) {
-      System.out.println("Table(s) [ " + Arrays.toString(schemaTables.toArray())
+      System.err.println("Table(s) [ " + Arrays.toString(schemaTables.toArray())
           + " ] are missing from the metastore database schema.");
       System.out.println("Schema table validation failed!!!");
       return false;
@@ -1096,7 +1094,7 @@ public class HiveSchemaTool {
     Option dryRunOpt = new Option("dryRun", "list SQL scripts (no execute)");
     Option verboseOpt = new Option("verbose", "only print SQL statements");
     Option serversOpt = OptionBuilder.withArgName("serverList")
-        .hasArgs().withDescription("a comma-separated list of servers used in location validation")
+        .hasArgs().withDescription("a comma-separated list of servers used in location validation in the format of scheme://authority (e.g. hdfs://localhost:8000)")
         .create("servers");
     cmdLineOptions.addOption(help);
     cmdLineOptions.addOption(dryRunOpt);


[37/50] [abbrv] hive git commit: HIVE-16143: Improve msck repair batching (Vihang Karajgaonkar, reviewed by Sahil Takiar & Aihua Xu)

Posted by we...@apache.org.
HIVE-16143: Improve msck repair batching (Vihang Karajgaonkar, reviewed by Sahil Takiar & Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0efb9368
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0efb9368
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0efb9368

Branch: refs/heads/hive-14535
Commit: 0efb93681099af4e4b7269c72d86fe379c99da7c
Parents: 86f74fd
Author: Aihua Xu <ai...@apache.org>
Authored: Mon May 15 14:08:32 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon May 15 14:08:32 2017 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  13 +-
 .../apache/hive/common/util/RetryUtilities.java | 112 ++++++
 .../hive/common/util/TestRetryUtilities.java    | 150 ++++++++
 .../test/queries/clientpositive/create_like.q   |   3 +-
 .../results/clientpositive/create_like.q.out    |  15 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   3 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  89 ++---
 .../exec/TestMsckCreatePartitionsInBatches.java | 340 +++++++++++++++++++
 .../test/queries/clientpositive/msck_repair_0.q |  10 +
 .../test/queries/clientpositive/msck_repair_1.q |   8 +
 .../test/queries/clientpositive/msck_repair_2.q |   5 +
 .../test/queries/clientpositive/msck_repair_3.q |   4 +
 .../clientpositive/msck_repair_batchsize.q      |   4 +
 .../results/clientpositive/msck_repair_0.q.out  |  38 ++-
 .../results/clientpositive/msck_repair_1.q.out  |  28 +-
 .../results/clientpositive/msck_repair_2.q.out  |  28 +-
 .../results/clientpositive/msck_repair_3.q.out  |  28 +-
 .../clientpositive/msck_repair_batchsize.q.out  |  34 +-
 ql/src/test/results/clientpositive/repair.q.out |   3 +-
 19 files changed, 857 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 6068f0d..1c37b6e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3369,9 +3369,16 @@ public class HiveConf extends Configuration {
        " others; 'ignore' will skip the validation (legacy behavior, causes bugs in many cases)"),
     HIVE_MSCK_REPAIR_BATCH_SIZE(
         "hive.msck.repair.batch.size", 0,
-        "Batch size for the msck repair command. If the value is greater than zero, "
-            + "it will execute batch wise with the configured batch size. "
-            + "The default value is zero. Zero means it will execute directly (Not batch wise)"),
+        "Batch size for the msck repair command. If the value is greater than zero,\n "
+            + "it will execute batch wise with the configured batch size. In case of errors while\n"
+            + "adding unknown partitions the batch size is automatically reduced by half in the subsequent\n"
+            + "retry attempt. The default value is zero which means it will execute directly (not batch wise)"),
+    HIVE_MSCK_REPAIR_BATCH_MAX_RETRIES("hive.msck.repair.batch.max.retries", 0,
+        "Maximum number of retries for the msck repair command when adding unknown partitions.\n "
+        + "If the value is greater than zero it will retry adding unknown partitions until the maximum\n"
+        + "number of attempts is reached or batch size is reduced to 0, whichever is earlier.\n"
+        + "In each retry attempt it will reduce the batch size by a factor of 2 until it reaches zero.\n"
+        + "If the value is set to zero it will retry until the batch size becomes zero as described above."),
     HIVE_SERVER2_LLAP_CONCURRENT_QUERIES("hive.server2.llap.concurrent.queries", -1,
         "The number of queries allowed in parallel via llap. Negative number implies 'infinite'."),
     HIVE_TEZ_ENABLE_MEMORY_MANAGER("hive.tez.enable.memory.manager", true,
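
For illustration of how the two new msck settings above interact (the values here are examples, not defaults introduced by this patch): with hive.msck.repair.batch.size=8 and hive.msck.repair.batch.max.retries=0, a batch of unknown partitions that keeps failing is retried with sizes 8, 4, 2 and 1 before MSCK REPAIR gives up, while hive.msck.repair.batch.max.retries=2 stops after the sizes 8 and 4. The retry loop itself lives in the new RetryUtilities class below.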

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/common/src/java/org/apache/hive/common/util/RetryUtilities.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/RetryUtilities.java b/common/src/java/org/apache/hive/common/util/RetryUtilities.java
new file mode 100644
index 0000000..3a20f2c
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/util/RetryUtilities.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.util.concurrent.Callable;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RetryUtilities {
+  public static class RetryException extends Exception {
+    private static final long serialVersionUID = 1L;
+
+    public RetryException(Exception ex) {
+      super(ex);
+    }
+
+    public RetryException(String msg) {
+      super(msg);
+    }
+  }
+
+  /**
+   * Interface used to create an ExponentialBackOffRetry policy
+   */
+  public static interface ExponentialBackOffRetry<T> {
+    /**
+     * This method should be implemented with the actual work which needs to be done for a given
+     * batch size; the retry policy invokes it repeatedly until it succeeds or retries are exhausted.
+     * @param batchSize The batch size for the work which needs to be executed
+     * @return the result of executing the work with the given batch size
+     * @throws Exception
+     */
+    public T execute(int batchSize) throws Exception;
+  }
+
+  /**
+   * This class is a base implementation of a simple exponential back-off retry policy. The batch size
+   * and decaying factor are provided with the constructor. It reduces the batch size by dividing
+   * it by the decaying factor every time there is an exception in the execute method.
+   */
+  public static abstract class ExponentiallyDecayingBatchWork<T>
+      implements ExponentialBackOffRetry<T> {
+    private int batchSize;
+    private final int decayingFactor;
+    private int maxRetries;
+    private static final Logger LOG = LoggerFactory.getLogger(ExponentiallyDecayingBatchWork.class);
+
+    public ExponentiallyDecayingBatchWork(int batchSize, int reducingFactor, int maxRetries) {
+      if (batchSize <= 0) {
+        throw new IllegalArgumentException(String.format(
+            "Invalid batch size %d provided. Batch size must be greater than 0", batchSize));
+      }
+      this.batchSize = batchSize;
+      if (reducingFactor <= 1) {
+        throw new IllegalArgumentException(String.format(
+            "Invalid decaying factor %d provided. Decaying factor must be greater than 1",
+            reducingFactor));
+      }
+      if (maxRetries < 0) {
+        throw new IllegalArgumentException(String.format(
+            "Invalid number of maximum retries %d provided. It must be a non-negative integer value",
+            maxRetries));
+      }
+      // if maxRetries is 0, the code retries until the batch size decays to zero
+      this.maxRetries = maxRetries;
+      this.decayingFactor = reducingFactor;
+    }
+
+    public T run() throws Exception {
+      int attempt = 0;
+      while (true) {
+        int size = getNextBatchSize();
+        if (size == 0) {
+          throw new RetryException("Batch size reduced to zero");
+        }
+        try {
+          return execute(size);
+        } catch (Exception ex) {
+          LOG.warn(String.format("Exception thrown while processing using a batch size %d", size),
+              ex);
+        } finally {
+          attempt++;
+          if (attempt == maxRetries) {
+            throw new RetryException(String.format("Maximum number of retry attempts %d exhausted", maxRetries));
+          }
+        }
+      }
+    }
+
+    private int getNextBatchSize() {
+      int ret = batchSize;
+      batchSize /= decayingFactor;
+      return ret;
+    }
+  }
+}
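
A minimal usage sketch of the new helper for readers of this digest (not part of the patch; MsckBatchSketch, addInBatches and addToMetastore are hypothetical names, and the real caller is DDLTask): subclass ExponentiallyDecayingBatchWork, put the batched work in execute(), and call run(). On every exception the batch size passed to execute() is divided by the decaying factor until either maxRetries attempts are used up or the size reaches zero.

import java.util.Arrays;
import java.util.List;

import org.apache.hive.common.util.RetryUtilities;

public class MsckBatchSketch {
  // Hypothetical stand-in for the metastore call that adds one chunk of partitions.
  static void addToMetastore(List<String> partitions) throws Exception {
    // add this chunk of partitions to the metastore
  }

  static void addInBatches(final List<String> partitionsToAdd) throws Exception {
    // batch size 4, decaying factor 2, maxRetries 0 (keep retrying until the batch size decays to zero)
    new RetryUtilities.ExponentiallyDecayingBatchWork<Void>(4, 2, 0) {
      @Override
      public Void execute(int size) throws Exception {
        // Work through the partitions in chunks of at most 'size'; an exception here makes
        // run() retry the whole loop with the batch size divided by the decaying factor.
        for (int i = 0; i < partitionsToAdd.size(); i += size) {
          addToMetastore(partitionsToAdd.subList(i, Math.min(i + size, partitionsToAdd.size())));
        }
        return null;
      }
    }.run();
  }

  public static void main(String[] args) throws Exception {
    addInBatches(Arrays.asList("ds=2017-05-15", "ds=2017-05-16", "ds=2017-05-17"));
  }
}

With these example arguments a persistently failing execute() would be attempted with batch sizes 4, 2 and 1 before run() throws RetryException, the same kind of sequence exercised by TestRetryUtilities below.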

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hive/common/util/TestRetryUtilities.java b/common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
new file mode 100644
index 0000000..4187be2
--- /dev/null
+++ b/common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hive.common.util.RetryUtilities.ExponentiallyDecayingBatchWork;
+import org.apache.hive.common.util.RetryUtilities.RetryException;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestRetryUtilities {
+
+  private class DummyExponentiallyDecayingBatchWork extends ExponentiallyDecayingBatchWork<Void> {
+    public DummyExponentiallyDecayingBatchWork(int batchSize, int reducingFactor,
+        int throwException) {
+      super(batchSize, reducingFactor, 0);
+      this.exceptionCount = throwException;
+    }
+
+    public DummyExponentiallyDecayingBatchWork(int batchSize, int reducingFactor,
+        int throwException, int maxRetries) {
+      super(batchSize, reducingFactor, maxRetries);
+      this.exceptionCount = throwException;
+    }
+
+    final List<Integer> batchSizes = new ArrayList<>();
+    int exceptionCount = 0;
+
+    @Override
+    public Void execute(int size) throws Exception {
+      batchSizes.add(size);
+      if (exceptionCount > 0) {
+        exceptionCount--;
+        throw new Exception("Dummy exception");
+      }
+      return null;
+    }
+
+    public int getCount() {
+      return batchSizes.size();
+    }
+
+    public int[] getBatchSizes() {
+      int[] ret = new int[batchSizes.size()];
+      int i = 0;
+      for (int b : batchSizes) {
+        ret[i++] = b;
+      }
+      return ret;
+    }
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testZeroBatchSize() {
+    new DummyExponentiallyDecayingBatchWork(0, 2, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNegativeBatchSize() {
+    new DummyExponentiallyDecayingBatchWork(-1, 2, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testZeroDecayingFactor() {
+    new DummyExponentiallyDecayingBatchWork(5, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testOneDecayingFactor() {
+    new DummyExponentiallyDecayingBatchWork(10, 1, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNegativeMaxRetries() {
+    new DummyExponentiallyDecayingBatchWork(10, 2, 0, -1);
+  }
+
+  @Test
+  public void testNumberOfAttempts() throws Exception {
+    // test perfectly divisible batchsize and decaying factor
+    DummyExponentiallyDecayingBatchWork dummy = new DummyExponentiallyDecayingBatchWork(10, 2, 0);
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 1, dummy.getCount());
+    // there were no exceptions; the batch size doesn't change until there is an exception
+    Assert.assertArrayEquals(new int[] { 10 }, dummy.getBatchSizes());
+    // test batchsize is not divisible by decaying factor
+    dummy = new DummyExponentiallyDecayingBatchWork(11, 2, 0);
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 1, dummy.getCount());
+    // there were no exceptions; the batch size doesn't change until there is an exception
+    Assert.assertArrayEquals(new int[] { 11 }, dummy.getBatchSizes());
+
+    dummy = new DummyExponentiallyDecayingBatchWork(11, 3, 1);
+    // batches will be sized 11,3
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 2, dummy.getCount());
+    Assert.assertArrayEquals(new int[] { 11, 3 }, dummy.getBatchSizes());
+
+    dummy = new DummyExponentiallyDecayingBatchWork(11, 3, 2);
+    // batches will be sized 11,3,1
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 3, dummy.getCount());
+    Assert.assertArrayEquals(new int[] { 11, 3, 1 }, dummy.getBatchSizes());
+
+    dummy = new DummyExponentiallyDecayingBatchWork(12, 3, 2);
+    // batches will be sized 12,4,1
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 3, dummy.getCount());
+    Assert.assertArrayEquals(new int[] { 12, 4, 1 }, dummy.getBatchSizes());
+  }
+
+  @Test
+  public void testZeroMaxRetriesValue() throws Exception {
+    DummyExponentiallyDecayingBatchWork dummy = new DummyExponentiallyDecayingBatchWork(10, 2, 3, 0);
+    dummy.run();
+    // batches will be sized 10, 5, 2, 1
+    Assert.assertEquals("Unexpected number of executions of execute method", 4, dummy.getCount());
+    Assert.assertArrayEquals(new int[] { 10, 5, 2, 1 }, dummy.getBatchSizes());
+
+    dummy = new DummyExponentiallyDecayingBatchWork(17, 2, 4, 0);
+    // batches will be sized 17, 8, 4, 2, 1
+    dummy.run();
+    Assert.assertEquals("Unexpected number of executions of execute method", 5, dummy.getCount());
+    Assert.assertArrayEquals(new int[] { 17, 8, 4, 2, 1 }, dummy.getBatchSizes());
+  }
+
+  @Test(expected = RetryException.class)
+  public void testRetriesExhausted() throws Exception {
+    // attempts at execute will be made using batch sizes 11, 3, 1; then a RetryException is thrown
+    DummyExponentiallyDecayingBatchWork dummy = new DummyExponentiallyDecayingBatchWork(11, 3, 3);
+    dummy.run();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/itests/hive-blobstore/src/test/queries/clientpositive/create_like.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/create_like.q b/itests/hive-blobstore/src/test/queries/clientpositive/create_like.q
index 38f384e..63ea3f6 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/create_like.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/create_like.q
@@ -21,10 +21,11 @@ DROP TABLE like_table;
 CREATE EXTERNAL TABLE like_table LIKE blobstore_partitioned_source_table LOCATION '${hiveconf:test.blobstore.path.unique}/create_like/data';
 
 MSCK REPAIR TABLE like_table;
+show partitions like_table;
 
 SELECT * FROM blobstore_partitioned_source_table;
 SELECT * FROM like_table;
 
 DROP TABLE like_table;
 
-SELECT * FROM blobstore_partitioned_source_table;
\ No newline at end of file
+SELECT * FROM blobstore_partitioned_source_table;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/itests/hive-blobstore/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/create_like.q.out b/itests/hive-blobstore/src/test/results/clientpositive/create_like.q.out
index 0d362a7..8dcbb93 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/create_like.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/create_like.q.out
@@ -91,10 +91,17 @@ POSTHOOK: query: MSCK REPAIR TABLE like_table
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@like_table
 Partitions not in metastore:	like_table:dt=20110924/hour=1	like_table:dt=20110924/hour=2	like_table:dt=20110925/hour=1	like_table:dt=20110925/hour=2
-Repair: Added partition to metastore like_table:dt=20110924/hour=1
-Repair: Added partition to metastore like_table:dt=20110924/hour=2
-Repair: Added partition to metastore like_table:dt=20110925/hour=1
-Repair: Added partition to metastore like_table:dt=20110925/hour=2
+#### A masked pattern was here ####
+PREHOOK: query: show partitions like_table
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@like_table
+POSTHOOK: query: show partitions like_table
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@like_table
+dt=20110924/hour=1
+dt=20110924/hour=2
+dt=20110925/hour=1
+dt=20110925/hour=2
 PREHOOK: query: SELECT * FROM blobstore_partitioned_source_table
 PREHOOK: type: QUERY
 PREHOOK: Input: default@blobstore_partitioned_source_table

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index d408321..d296851 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1699,7 +1699,8 @@ public class QTestUtil {
       ".*at com\\.sun\\.proxy.*",
       ".*at com\\.jolbox.*",
       ".*at com\\.zaxxer.*",
-      "org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*"
+      "org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*",
+      "^Repair: Added partition to metastore.*"
   });
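For reference, a small self-contained sketch (the class name RepairMaskDemo is hypothetical, and it assumes QTestUtil replaces output lines matching these planMask patterns with the masked-pattern marker, as the updated .q.out files below suggest). Lines matching the new entry are masked, which is why the golden files now show "#### A masked pattern was here ####" instead of the individual "Repair: Added partition to metastore ..." lines:

    import java.util.regex.Pattern;

    public class RepairMaskDemo {
      public static void main(String[] args) {
        Pattern repairMask = Pattern.compile("^Repair: Added partition to metastore.*");
        String line = "Repair: Added partition to metastore default.repairtable:p1=c/p2=a";
        if (repairMask.matcher(line).matches()) {
          line = "#### A masked pattern was here ####"; // what the updated .q.out files show
        }
        System.out.println(line);
      }
    }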
 
   private final Pattern[] partialReservedPlanMask = toPattern(new String[] {

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index b07d6b1..44655af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -249,10 +249,12 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.hive.common.util.AnnotationUtils;
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.ReflectionUtil;
+import org.apache.hive.common.util.RetryUtilities;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.stringtemplate.v4.ST;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Iterables;
 
 /**
@@ -1848,19 +1850,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     }
   }
 
-  private void msckAddPartitionsOneByOne(Hive db, Table table,
-      Set<CheckResult.PartitionResult> partsNotInMs, List<String> repairOutput) {
-    for (CheckResult.PartitionResult part : partsNotInMs) {
-      try {
-        db.createPartition(table, Warehouse.makeSpecFromName(part.getPartitionName()));
-        repairOutput.add("Repair: Added partition to metastore "
-            + table.getTableName() + ':' + part.getPartitionName());
-      } catch (Exception e) {
-        LOG.warn("Repair error, could not add partition to metastore: ", e);
-      }
-    }
-  }
-
   private int compact(Hive db, AlterTableSimpleDesc desc) throws HiveException {
 
     Table tbl = db.getTable(desc.getTableName());
@@ -1988,34 +1977,18 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
           }
         }
         Table table = db.getTable(msckDesc.getTableName());
-        AddPartitionDesc apd = new AddPartitionDesc(
-            table.getDbName(), table.getTableName(), false);
+        int batchSize = conf.getIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE);
+        int maxRetries = conf.getIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_MAX_RETRIES);
+        int decayingFactor = 2;
+        if (batchSize == 0) {
+          //batching is not enabled. Try to add all the partitions in one call
+          batchSize = partsNotInMs.size();
+        }
         try {
-          int batch_size = conf.getIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE);
-          if (batch_size > 0 && partsNotInMs.size() > batch_size) {
-            int counter = 0;
-            for (CheckResult.PartitionResult part : partsNotInMs) {
-              counter++;
-              apd.addPartition(Warehouse.makeSpecFromName(part.getPartitionName()), null);
-              repairOutput.add("Repair: Added partition to metastore " + msckDesc.getTableName()
-                  + ':' + part.getPartitionName());
-              if (counter % batch_size == 0 || counter == partsNotInMs.size()) {
-                db.createPartitions(apd);
-                apd = new AddPartitionDesc(table.getDbName(), table.getTableName(), false);
-              }
-            }
-          } else {
-            for (CheckResult.PartitionResult part : partsNotInMs) {
-              apd.addPartition(Warehouse.makeSpecFromName(part.getPartitionName()), null);
-              repairOutput.add("Repair: Added partition to metastore " + msckDesc.getTableName()
-                  + ':' + part.getPartitionName());
-            }
-            db.createPartitions(apd);
-          }
+          createPartitionsInBatches(db, repairOutput, partsNotInMs, table, batchSize,
+              decayingFactor, maxRetries);
         } catch (Exception e) {
-          LOG.info("Could not bulk-add partitions to metastore; trying one by one", e);
-          repairOutput.clear();
-          msckAddPartitionsOneByOne(db, table, partsNotInMs, repairOutput);
+          throw new HiveException(e);
         }
       }
     } catch (HiveException e) {
@@ -2067,6 +2040,44 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     return 0;
   }
 
+  @VisibleForTesting
+  void createPartitionsInBatches(Hive db, List<String> repairOutput,
+      Set<CheckResult.PartitionResult> partsNotInMs, Table table, int batchSize, int decayingFactor, int maxRetries)
+      throws Exception {
+    String addMsgFormat = "Repair: Added partition to metastore "
+        + table.getTableName() + ":%s";
+    Set<CheckResult.PartitionResult> batchWork = new HashSet<>(partsNotInMs);
+    new RetryUtilities.ExponentiallyDecayingBatchWork<Void>(batchSize, decayingFactor, maxRetries) {
+      @Override
+      public Void execute(int size) throws Exception {
+        while (!batchWork.isEmpty()) {
+          //get the current batch size
+          int currentBatchSize = size;
+          AddPartitionDesc apd =
+              new AddPartitionDesc(table.getDbName(), table.getTableName(), true);
+          //store the partitions temporarily until processed
+          List<CheckResult.PartitionResult> lastBatch = new ArrayList<>(currentBatchSize);
+          List<String> addMsgs = new ArrayList<>(currentBatchSize);
+          //add the number of partitions given by the current batchsize
+          for (CheckResult.PartitionResult part : batchWork) {
+            if (currentBatchSize == 0) {
+              break;
+            }
+            apd.addPartition(Warehouse.makeSpecFromName(part.getPartitionName()), null);
+            lastBatch.add(part);
+            addMsgs.add(String.format(addMsgFormat, part.getPartitionName()));
+            currentBatchSize--;
+          }
+          db.createPartitions(apd);
+          // if last batch is successful remove it from partsNotInMs
+          batchWork.removeAll(lastBatch);
+          repairOutput.addAll(addMsgs);
+        }
+        return null;
+      }
+    }.run();
+  }
+
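As a quick reference (a sketch for illustration only, not part of the patch; the class name BatchSizeDemo is hypothetical), the sequence of batch sizes the retry loop above will attempt follows the hard-coded decaying factor of 2, starting from the value of ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE (or the full partition count when that is 0):

    public class BatchSizeDemo {
      public static void main(String[] args) {
        int batchSize = 30;     // e.g. the configured HIVE_MSCK_REPAIR_BATCH_SIZE
        int decayingFactor = 2; // hard-coded above
        for (int size = batchSize; size > 0; size /= decayingFactor) {
          System.out.print(size + " "); // prints: 30 15 7 3 1
        }
      }
    }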
   /**
    * Write the result of msck to a writer.
    *

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
new file mode 100644
index 0000000..2454afb
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
@@ -0,0 +1,340 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec;
+
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.CheckResult.PartitionResult;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.util.RetryUtilities.RetryException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+
+public class TestMsckCreatePartitionsInBatches {
+  private static HiveConf hiveConf;
+  private static DDLTask ddlTask;
+  private final String tableName = "test_msck_batch";
+  private static Hive db;
+  private List<String> repairOutput;
+  private Table table;
+
+  @BeforeClass
+  public static void setupClass() throws HiveException {
+    hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
+    hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
+    SessionState.start(hiveConf);
+    db = Hive.get(hiveConf);
+    ddlTask = new DDLTask();
+  }
+
+  @Before
+  public void before() throws Exception {
+    createPartitionedTable("default", tableName);
+    table = db.getTable(tableName);
+    repairOutput = new ArrayList<String>();
+  }
+
+  @After
+  public void after() throws Exception {
+    cleanUpTableQuietly("default", tableName);
+  }
+
+  private Table createPartitionedTable(String dbName, String tableName) throws Exception {
+    try {
+      db.dropTable(dbName, tableName);
+      db.createTable(tableName, Arrays.asList("key", "value"), // Data columns.
+          Arrays.asList("city"), // Partition columns.
+          TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
+      return db.getTable(dbName, tableName);
+    } catch (Exception exception) {
+      fail("Unable to drop and create table " + dbName + "." + tableName + " because "
+          + StringUtils.stringifyException(exception));
+      throw exception;
+    }
+  }
+
+  private void cleanUpTableQuietly(String dbName, String tableName) {
+    try {
+      db.dropTable(dbName, tableName, true, true, true);
+    } catch (Exception exception) {
+      fail("Unexpected exception: " + StringUtils.stringifyException(exception));
+    }
+  }
+
+  private Set<PartitionResult> createPartsNotInMs(int numOfParts) {
+    Set<PartitionResult> partsNotInMs = new HashSet<>();
+    for (int i = 0; i < numOfParts; i++) {
+      PartitionResult result = new PartitionResult();
+      result.setPartitionName("city=dummyCity_" + String.valueOf(i));
+      partsNotInMs.add(result);
+    }
+    return partsNotInMs;
+  }
+
+  /**
+   * Tests the number of times Hive.createPartitions is called when the total number of
+   * partitions to be added is evenly divisible by the batch size
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testNumberOfCreatePartitionCalls() throws Exception {
+    // create 10 dummy partitions
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(10);
+    Hive spyDb = Mockito.spy(db);
+    // batch size of 5 and decaying factor of 2
+    ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 5, 2, 0);
+    // there should be 2 calls to create partitions, each with a batch size of 5
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(2)).createPartitions(argument.capture());
+    // confirm the batch sizes were 5, 5 in the two calls to create partitions
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(String.format("Unexpected batch size in retry attempt %d ", retryAttempt++),
+        5, apds.get(0).getPartitionCount());
+    Assert.assertEquals(String.format("Unexpected batch size in retry attempt %d ", retryAttempt++),
+        5, apds.get(1).getPartitionCount());
+  }
+
+  /**
+   * Tests the number of times Hive.createPartitions is called when the total number of
+   * partitions to be added is not exactly divisible by the batch size
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testUnevenNumberOfCreatePartitionCalls() throws Exception {
+    // create 9 dummy partitions
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(9);
+    Hive spyDb = Mockito.spy(db);
+    // batch size of 5 and decaying factor of 2
+    ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 5, 2, 0);
+    // there should be 2 calls to create partitions with batch sizes of 5, 4
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(2)).createPartitions(argument.capture());
+    // confirm the batch sizes were 5, 4 in the two calls to create partitions
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(String.format("Unexpected batch size in retry attempt %d ", retryAttempt++),
+        5, apds.get(0).getPartitionCount());
+    Assert.assertEquals(String.format("Unexpected batch size in retry attempt %d ", retryAttempt++),
+        4, apds.get(1).getPartitionCount());
+  }
+
+  /**
+   * Tests the number of times Hive.createPartitions is called when the total number of
+   * partitions is exactly equal to the batch size
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testEqualNumberOfPartitions() throws Exception {
+    // create 13 dummy partitions
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(13);
+    Hive spyDb = Mockito.spy(db);
+    // batch size of 13 and decaying factor of 2
+    ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 13, 2, 0);
+    // there should be 1 call to create partitions with a batch size of 13
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(1)).createPartitions(argument.capture());
+    Assert.assertEquals("Unexpected batch size", 13,
+        argument.getValue().getPartitionCount());
+  }
+
+  /**
+   * Tests the number of times Hive.createPartitions is called when the total number of
+   * partitions is less than the batch size
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testSmallNumberOfPartitions() throws Exception {
+    // create 10 dummy partitions
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(10);
+    Hive spyDb = Mockito.spy(db);
+    // batch size of 20 and decaying factor of 2
+    ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 20, 2, 0);
+    // there should be 1 call to create partitions with a batch size of 10
+    Mockito.verify(spyDb, Mockito.times(1)).createPartitions(Mockito.anyObject());
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb).createPartitions(argument.capture());
+    Assert.assertEquals("Unexpected batch size", 10,
+        argument.getValue().getPartitionCount());
+  }
+
+  /**
+   * Tests the number of calls to createPartitions and the respective batch sizes when the first call
+   * to createPartitions throws a HiveException. The batch size should then be reduced by the decaying factor
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testBatchingWhenException() throws Exception {
+    // create 23 dummy partitions
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(23);
+    Hive spyDb = Mockito.spy(db);
+    // first call to createPartitions should throw exception
+    Mockito.doThrow(HiveException.class).doCallRealMethod().doCallRealMethod().when(spyDb)
+        .createPartitions(Mockito.any(AddPartitionDesc.class));
+
+    // test with a batch size of 30 and decaying factor of 2
+    ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 30, 2, 0);
+    // confirm the batch sizes were 23, 15, 8 in the three calls to create partitions
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    // there should be 3 calls to create partitions with batch sizes of 23, 15, 8
+    Mockito.verify(spyDb, Mockito.times(3)).createPartitions(argument.capture());
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 23,
+        apds.get(0).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 15,
+        apds.get(1).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 8,
+        apds.get(2).getPartitionCount());
+  }
+
+  /**
+   * Tests the retries-exhausted case when every call to Hive.createPartitions keeps throwing
+   * HiveException. The batch size should decrease exponentially based on the decaying factor,
+   * ultimately giving up when it reaches 0
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testRetriesExhaustedBatchSize() throws Exception {
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(17);
+    Hive spyDb = Mockito.spy(db);
+    Mockito.doThrow(HiveException.class).when(spyDb)
+        .createPartitions(Mockito.any(AddPartitionDesc.class));
+    // batch size of 30 and decaying factor of 2
+    Exception ex = null;
+    try {
+      ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 30, 2, 0);
+    } catch (Exception retryEx) {
+      ex = retryEx;
+    }
+    Assert.assertFalse("Exception was expected but was not thrown", ex == null);
+    Assert.assertTrue("Unexpected class of exception thrown", ex instanceof RetryException);
+    // there should be 5 calls to create partitions with batch sizes of 17, 15, 7, 3, 1
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(5)).createPartitions(argument.capture());
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 17,
+        apds.get(0).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 15,
+        apds.get(1).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 7,
+        apds.get(2).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 3,
+        apds.get(3).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 1,
+        apds.get(4).getPartitionCount());
+  }
+
+  /**
+   * Tests the maximum retry attempts provided by configuration
+   * @throws Exception
+   */
+  @Test
+  public void testMaxRetriesReached() throws Exception {
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(17);
+    Hive spyDb = Mockito.spy(db);
+    Mockito.doThrow(HiveException.class).when(spyDb)
+        .createPartitions(Mockito.any(AddPartitionDesc.class));
+    // batch size of 30, decaying factor of 2 and a maximum of 2 retries
+    Exception ex = null;
+    try {
+      ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 30, 2, 2);
+    } catch (Exception retryEx) {
+      ex = retryEx;
+    }
+    Assert.assertFalse("Exception was expected but was not thrown", ex == null);
+    Assert.assertTrue("Unexpected class of exception thrown", ex instanceof RetryException);
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(2)).createPartitions(argument.capture());
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 17,
+        apds.get(0).getPartitionCount());
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 15,
+        apds.get(1).getPartitionCount());
+  }
+
+  /**
+   * Tests when the maximum number of retries is set to 1. In this case there should be exactly
+   * one call to createPartitions before the retry logic gives up
+   * @throws Exception
+   */
+  @Test
+  public void testOneMaxRetries() throws Exception {
+    Set<PartitionResult> partsNotInMs = createPartsNotInMs(17);
+    Hive spyDb = Mockito.spy(db);
+    Mockito.doThrow(HiveException.class).when(spyDb)
+        .createPartitions(Mockito.any(AddPartitionDesc.class));
+    // batch size of 30, decaying factor of 2 and a maximum of 1 retry
+    Exception ex = null;
+    try {
+      ddlTask.createPartitionsInBatches(spyDb, repairOutput, partsNotInMs, table, 30, 2, 1);
+    } catch (Exception retryEx) {
+      ex = retryEx;
+    }
+    Assert.assertFalse("Exception was expected but was not thrown", ex == null);
+    Assert.assertTrue("Unexpected class of exception thrown", ex instanceof RetryException);
+    // there should be 1 call to create partitions with a batch size of 17
+    ArgumentCaptor<AddPartitionDesc> argument = ArgumentCaptor.forClass(AddPartitionDesc.class);
+    Mockito.verify(spyDb, Mockito.times(1)).createPartitions(argument.capture());
+    List<AddPartitionDesc> apds = argument.getAllValues();
+    int retryAttempt = 1;
+    Assert.assertEquals(
+        String.format("Unexpected batch size in retry attempt %d ", retryAttempt++), 17,
+        apds.get(0).getPartitionCount());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/queries/clientpositive/msck_repair_0.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/msck_repair_0.q b/ql/src/test/queries/clientpositive/msck_repair_0.q
index 2254233..cb291fe 100644
--- a/ql/src/test/queries/clientpositive/msck_repair_0.q
+++ b/ql/src/test/queries/clientpositive/msck_repair_0.q
@@ -7,15 +7,23 @@ CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
 MSCK TABLE repairtable;
 
+show partitions repairtable;
+
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b/datafile;
 
 MSCK TABLE default.repairtable;
 
+show partitions default.repairtable;
+
 MSCK REPAIR TABLE default.repairtable;
 
+show partitions default.repairtable;
+
 MSCK TABLE repairtable;
 
+show partitions repairtable;
+
 set hive.mapred.mode=strict;
 
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=e/p2=f/p3=g;
@@ -23,4 +31,6 @@ dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=e/p2=f/p3=g/datafile;
 
 MSCK REPAIR TABLE default.repairtable;
 
+show partitions default.repairtable;
+
 DROP TABLE default.repairtable;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/queries/clientpositive/msck_repair_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/msck_repair_1.q b/ql/src/test/queries/clientpositive/msck_repair_1.q
index ea596cb..21aca3b 100644
--- a/ql/src/test/queries/clientpositive/msck_repair_1.q
+++ b/ql/src/test/queries/clientpositive/msck_repair_1.q
@@ -6,13 +6,21 @@ CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
 MSCK TABLE repairtable;
 
+SHOW PARTITIONS repairtable;
+
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b/datafile;
 
 MSCK TABLE default.repairtable;
 
+SHOW PARTITIONS default.repairtable;
+
 MSCK REPAIR TABLE default.repairtable;
 
+SHOW PARTITIONS default.repairtable;
+
 MSCK TABLE repairtable;
 
+SHOW PARTITIONS repairtable;
+
 DROP TABLE default.repairtable;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/queries/clientpositive/msck_repair_2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/msck_repair_2.q b/ql/src/test/queries/clientpositive/msck_repair_2.q
index d833821..77785e9 100644
--- a/ql/src/test/queries/clientpositive/msck_repair_2.q
+++ b/ql/src/test/queries/clientpositive/msck_repair_2.q
@@ -7,14 +7,19 @@ CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
 MSCK TABLE repairtable;
 
+show partitions repairtable;
+
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b/datafile;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=c/datafile;
 
 MSCK TABLE default.repairtable;
+show partitions repairtable;
 
 MSCK REPAIR TABLE default.repairtable;
+show partitions repairtable;
 
 MSCK TABLE repairtable;
+show partitions repairtable;
 
 DROP TABLE default.repairtable;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/queries/clientpositive/msck_repair_3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/msck_repair_3.q b/ql/src/test/queries/clientpositive/msck_repair_3.q
index fdefca1..f42443f 100644
--- a/ql/src/test/queries/clientpositive/msck_repair_3.q
+++ b/ql/src/test/queries/clientpositive/msck_repair_3.q
@@ -5,13 +5,17 @@ DROP TABLE IF EXISTS repairtable;
 CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
 MSCK TABLE repairtable;
+show partitions repairtable;
 
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/p3=b;
 
 MSCK TABLE default.repairtable;
+show partitions repairtable;
 
 MSCK REPAIR TABLE default.repairtable;
+show partitions repairtable;
 
 MSCK TABLE repairtable;
+show partitions repairtable;
 
 DROP TABLE default.repairtable;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/queries/clientpositive/msck_repair_batchsize.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/msck_repair_batchsize.q b/ql/src/test/queries/clientpositive/msck_repair_batchsize.q
index e56e97a..a44c00e 100644
--- a/ql/src/test/queries/clientpositive/msck_repair_batchsize.q
+++ b/ql/src/test/queries/clientpositive/msck_repair_batchsize.q
@@ -14,10 +14,13 @@ dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=b/p2=a/datafile;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=c/p2=a/datafile;
 
 MSCK TABLE default.repairtable;
+show partitions default.repairtable;
 
 MSCK REPAIR TABLE default.repairtable;
+show partitions default.repairtable;
 
 MSCK TABLE repairtable;
+show partitions repairtable;
 
 DROP TABLE default.repairtable;
 
@@ -28,5 +31,6 @@ CREATE TABLE `repairtable`( `col` string) PARTITIONED BY (  `p1` string,  `p2` s
 dfs -touchz ${system:test.tmp.dir}/apps/hive/warehouse/test.db/repairtable/p1=c/p2=a/p3=b/datafile;
 set hive.mv.files.thread=1;
 MSCK TABLE repairtable;
+show partitions repairtable;
 
 DROP TABLE default.repairtable;

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/msck_repair_0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/msck_repair_0.q.out b/ql/src/test/results/clientpositive/msck_repair_0.q.out
index 2e0d9dc..f48cc78 100644
--- a/ql/src/test/results/clientpositive/msck_repair_0.q.out
+++ b/ql/src/test/results/clientpositive/msck_repair_0.q.out
@@ -16,6 +16,12 @@ PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -23,6 +29,12 @@ POSTHOOK: query: MSCK TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
+PREHOOK: query: show partitions default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -30,13 +42,27 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
-Repair: Added partition to metastore default.repairtable:p1=c/p2=a
+#### A masked pattern was here ####
+PREHOOK: query: show partitions default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -44,7 +70,15 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=e/p2=f
-Repair: Added partition to metastore default.repairtable:p1=e/p2=f
+#### A masked pattern was here ####
+PREHOOK: query: show partitions default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
+p1=e/p2=f
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/msck_repair_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/msck_repair_1.q.out b/ql/src/test/results/clientpositive/msck_repair_1.q.out
index 3f2fe75..5f94246 100644
--- a/ql/src/test/results/clientpositive/msck_repair_1.q.out
+++ b/ql/src/test/results/clientpositive/msck_repair_1.q.out
@@ -16,6 +16,12 @@ PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: SHOW PARTITIONS repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: SHOW PARTITIONS repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -23,6 +29,12 @@ POSTHOOK: query: MSCK TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
+PREHOOK: query: SHOW PARTITIONS default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: SHOW PARTITIONS default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -30,13 +42,27 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
-Repair: Added partition to metastore default.repairtable:p1=c/p2=a
+#### A masked pattern was here ####
+PREHOOK: query: SHOW PARTITIONS default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: SHOW PARTITIONS default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: SHOW PARTITIONS repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: SHOW PARTITIONS repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/msck_repair_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/msck_repair_2.q.out b/ql/src/test/results/clientpositive/msck_repair_2.q.out
index 3f2fe75..c702f37 100644
--- a/ql/src/test/results/clientpositive/msck_repair_2.q.out
+++ b/ql/src/test/results/clientpositive/msck_repair_2.q.out
@@ -16,6 +16,12 @@ PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -23,6 +29,12 @@ POSTHOOK: query: MSCK TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -30,13 +42,27 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
-Repair: Added partition to metastore default.repairtable:p1=c/p2=a
+#### A masked pattern was here ####
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/msck_repair_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/msck_repair_3.q.out b/ql/src/test/results/clientpositive/msck_repair_3.q.out
index 3f2fe75..c702f37 100644
--- a/ql/src/test/results/clientpositive/msck_repair_3.q.out
+++ b/ql/src/test/results/clientpositive/msck_repair_3.q.out
@@ -16,6 +16,12 @@ PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -23,6 +29,12 @@ POSTHOOK: query: MSCK TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -30,13 +42,27 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
-Repair: Added partition to metastore default.repairtable:p1=c/p2=a
+#### A masked pattern was here ####
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=c/p2=a
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/msck_repair_batchsize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/msck_repair_batchsize.q.out b/ql/src/test/results/clientpositive/msck_repair_batchsize.q.out
index ba99024..aed9271 100644
--- a/ql/src/test/results/clientpositive/msck_repair_batchsize.q.out
+++ b/ql/src/test/results/clientpositive/msck_repair_batchsize.q.out
@@ -23,6 +23,12 @@ POSTHOOK: query: MSCK TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=a/p2=a	repairtable:p1=b/p2=a	repairtable:p1=c/p2=a
+PREHOOK: query: show partitions default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: MSCK REPAIR TABLE default.repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
@@ -30,15 +36,31 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=a/p2=a	repairtable:p1=b/p2=a	repairtable:p1=c/p2=a
-Repair: Added partition to metastore default.repairtable:p1=a/p2=a
-Repair: Added partition to metastore default.repairtable:p1=b/p2=a
-Repair: Added partition to metastore default.repairtable:p1=c/p2=a
+#### A masked pattern was here ####
+PREHOOK: query: show partitions default.repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions default.repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=a/p2=a
+p1=b/p2=a
+p1=c/p2=a
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable
 POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
+p1=a/p2=a
+p1=b/p2=a
+p1=c/p2=a
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable
@@ -64,6 +86,12 @@ POSTHOOK: query: MSCK TABLE repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=c/p2=a
+PREHOOK: query: show partitions repairtable
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@repairtable
+POSTHOOK: query: show partitions repairtable
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@repairtable
 PREHOOK: query: DROP TABLE default.repairtable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@repairtable

http://git-wip-us.apache.org/repos/asf/hive/blob/0efb9368/ql/src/test/results/clientpositive/repair.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/repair.q.out b/ql/src/test/results/clientpositive/repair.q.out
index c183464..581afe8 100644
--- a/ql/src/test/results/clientpositive/repair.q.out
+++ b/ql/src/test/results/clientpositive/repair.q.out
@@ -30,8 +30,7 @@ POSTHOOK: query: MSCK REPAIR TABLE default.repairtable
 POSTHOOK: type: MSCK
 POSTHOOK: Output: default@repairtable
 Partitions not in metastore:	repairtable:p1=a/p2=a	repairtable:p1=b/p2=a
-Repair: Added partition to metastore default.repairtable:p1=a/p2=a
-Repair: Added partition to metastore default.repairtable:p1=b/p2=a
+#### A masked pattern was here ####
 PREHOOK: query: MSCK TABLE repairtable
 PREHOOK: type: MSCK
 PREHOOK: Output: default@repairtable


[42/50] [abbrv] hive git commit: HIVE-16617: Clean up javadoc from errors in module hive-shims (Janos Gub via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-16617: Clean up javadoc from errors in module hive-shims (Janos Gub via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7827316f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7827316f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7827316f

Branch: refs/heads/hive-14535
Commit: 7827316f0c4cb9e9fe63506687b33bf4b2c4e70a
Parents: 1b8ba02
Author: Janos Gub <gu...@gmail.com>
Authored: Tue May 16 08:25:37 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:25:37 2017 +0200

----------------------------------------------------------------------
 .../src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java | 5 ++---
 .../org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java   | 4 ++--
 .../hive/thrift/TokenStoreDelegationTokenSecretManager.java     | 2 +-
 3 files changed, 5 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7827316f/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 9c6901d..c280d49 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -168,7 +168,6 @@ public interface HadoopShims {
    * All updates to jobtracker/resource manager rpc address
    * in the configuration should be done through this shim
    * @param conf
-   * @return
    */
   public void setJobLauncherRpcAddress(Configuration conf, String val);
 
@@ -252,12 +251,12 @@ public interface HadoopShims {
 
   /**
    * For the block locations returned by getLocations() convert them into a Treemap
-   * <Offset,blockLocation> by iterating over the list of blockLocation.
+   * &lt;Offset,blockLocation&gt; by iterating over the list of blockLocation.
    * Using TreeMap from offset to blockLocation, makes it O(logn) to get a particular
    * block based upon offset.
    * @param fs the file system
    * @param status the file information
-   * @return TreeMap<Long, BlockLocation>
+   * @return TreeMap&lt;Long, BlockLocation&gt;
    * @throws IOException
    */
   TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,

http://git-wip-us.apache.org/repos/asf/hive/blob/7827316f/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index d420d09..fd86fed 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -169,9 +169,9 @@ public abstract class HadoopThriftAuthBridge {
     /**
      * Create a client-side SASL transport that wraps an underlying transport.
      *
-     * @param method The authentication method to use. Currently only KERBEROS is
+     * @param methodStr The authentication method to use. Currently only KERBEROS is
      *               supported.
-     * @param serverPrincipal The Kerberos principal of the target server.
+     * @param principalConfig The Kerberos principal of the target server.
      * @param underlyingTransport The underlying transport mechanism, usually a TSocket.
      * @param saslProps the sasl properties to create the client with
      */

http://git-wip-us.apache.org/repos/asf/hive/blob/7827316f/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
index 4d910d8..4719b85 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
@@ -184,7 +184,7 @@ public class TokenStoreDelegationTokenSecretManager extends DelegationTokenSecre
 
   /**
    * Synchronize master key updates / sequence generation for multiple nodes.
-   * NOTE: {@Link AbstractDelegationTokenSecretManager} keeps currentKey private, so we need
+   * NOTE: {@link AbstractDelegationTokenSecretManager} keeps currentKey private, so we need
    * to utilize this "hook" to manipulate the key through the object reference.
    * This .20S workaround should cease to exist when Hadoop supports token store.
    */


[33/50] [abbrv] hive git commit: HIVE-1010: Implement INFORMATION_SCHEMA in Hive (Gunther Hagleitner, reviewed by Thejas Nair)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
index 0e6ec84..a02baf9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
@@ -81,7 +81,12 @@ public class HiveIndexedInputFormat extends HiveInputFormat {
       // class
       Class inputFormatClass = part.getInputFileFormatClass();
       InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
-      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), newjob);
+
+      try {
+        Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), newjob);
+      } catch (HiveException e) {
+        throw new IOException(e);
+      }
 
       FileInputFormat.setInputPaths(newjob, dir);
       newjob.setInputFormat(inputFormat.getClass());

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 010b88c..21394c6 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -357,9 +357,13 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
       LOG.debug("Found spec for " + hsplit.getPath() + " " + part + " from " + pathToPartitionInfo);
     }
 
-    if ((part != null) && (part.getTableDesc() != null)) {
-      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
-      nonNative = part.getTableDesc().isNonNative();
+    try {
+      if ((part != null) && (part.getTableDesc() != null)) {
+        Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
+        nonNative = part.getTableDesc().isNonNative();
+      }
+    } catch (HiveException e) {
+      throw new IOException(e);
     }
 
     Path splitPath = hsplit.getPath();
@@ -419,7 +423,11 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
       InputFormat inputFormat, Class<? extends InputFormat> inputFormatClass, int splits,
       TableDesc table, List<InputSplit> result) throws IOException {
 
-    Utilities.copyTablePropertiesToConf(table, conf);
+    try {
+      Utilities.copyTablePropertiesToConf(table, conf);
+    } catch (HiveException e) {
+      throw new IOException(e);
+    }
 
     if (tableScan != null) {
       pushFilters(conf, tableScan);

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
index 68407f5..42f9b66 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
@@ -183,9 +183,14 @@ public class ProjectionPusher {
     final JobConf cloneJobConf = new JobConf(jobConf);
     final PartitionDesc part = pathToPartitionInfo.get(path);
 
-    if ((part != null) && (part.getTableDesc() != null)) {
-      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
+    try {
+      if ((part != null) && (part.getTableDesc() != null)) {
+        Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
+      }
+    } catch (Exception e) {
+      throw new IOException(e);
     }
+
     pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
     return cloneJobConf;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
index 82b78b8..e87a96d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
@@ -93,6 +93,11 @@ public class DefaultStorageHandler implements HiveStorageHandler {
   }
 
   @Override
+  public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> secrets) {
+    //do nothing by default
+  }
+
+  @Override
   public Configuration getConf() {
     return conf;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
index 5975d0c..bd8c60a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
@@ -99,6 +99,12 @@ public interface HiveStorageHandler extends Configurable {
     Map<String, String> jobProperties);
 
   /**
+   * This method is called to allow the StorageHandlers the chance to
+   * populate secret keys into the job's credentials.
+   */
+  public abstract void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> secrets);
+
+  /**
    * This method is called to allow the StorageHandlers the chance
    * to populate the JobContext.getConfiguration() with properties that
    * maybe be needed by the handler's bundled artifacts (ie InputFormat, SerDe, etc).

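For illustration, a storage handler that wants to use the new hook only has to override it and drop its sensitive values into the supplied map; the rest of the handler stays unchanged. The sketch below is not part of the patch set: the class name and the "example.jdbc.password" property are made up for the example, while the configureInputJobCredentials signature and the DefaultStorageHandler base class come from the diffs above, and TableDesc.getProperties() is assumed to be the usual accessor for the table's Properties.

import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;

/**
 * Hypothetical handler that publishes a connection password as a job secret
 * instead of a plain job property, keeping it out of the serialized plan.
 */
public class ExampleSecretAwareStorageHandler extends DefaultStorageHandler {

  @Override
  public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> secrets) {
    // Assumed table property name; a real handler would use whatever key it defines.
    String password = tableDesc.getProperties().getProperty("example.jdbc.password");
    if (password != null) {
      secrets.put("example.jdbc.password", password);
    }
  }
}
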
http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
index 4ca8329..9a0a74d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
@@ -116,7 +116,6 @@ public class UnparseTranslator {
       if (existingEntry.getValue().tokenStopIndex <= tokenStopIndex &&
             existingEntry.getKey() >= tokenStartIndex) {
         // Collect newer entry is if a super-set of existing entry,
-        assert (replacementText.contains(existingEntry.getValue().replacementText));
         subsetEntries.add(existingEntry.getKey());
         // check if the existing entry contains the new
       } else if (existingEntry.getValue().tokenStopIndex >= tokenStopIndex &&

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
index 68dcd0d..157a697 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
@@ -116,7 +116,11 @@ public class PartitionDesc implements Serializable, Cloneable {
 
   private void PartitionDescConstructorHelper(final Partition part,final TableDesc tblDesc, boolean setInputFileFormat)
     throws HiveException {
+
+    PlanUtils.configureInputJobPropertiesForStorageHandler(tblDesc);
+
     this.tableDesc = tblDesc;
+
     setPartSpec(part.getSpec());
     if (setInputFileFormat) {
       setInputFileFormatClass(part.getInputFormatClass());
@@ -367,7 +371,6 @@ public class PartitionDesc implements Serializable, Cloneable {
    *          URI to the partition file
    */
   public void deriveBaseFileName(Path path) {
-    PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
 
     if (path == null) {
       return;

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 14f2a12..d82973c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -896,6 +896,7 @@ public final class PlanUtils {
             org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
       if (storageHandler != null) {
         Map<String, String> jobProperties = new LinkedHashMap<String, String>();
+        Map<String, String> jobSecrets = new LinkedHashMap<String, String>();
         if(input) {
             try {
                 storageHandler.configureInputJobProperties(
@@ -906,6 +907,15 @@ public final class PlanUtils {
                     "using configureTableJobProperties",e);
                 storageHandler.configureTableJobProperties(tableDesc, jobProperties);
             }
+
+            try{
+              storageHandler.configureInputJobCredentials(
+                tableDesc,
+                jobSecrets);
+            } catch(AbstractMethodError e) {
+              // ignore
+              LOG.info("configureInputJobSecrets not found");
+            }
         }
         else {
             try {
@@ -924,6 +934,11 @@ public final class PlanUtils {
         if (!jobProperties.isEmpty()) {
           tableDesc.setJobProperties(jobProperties);
         }
+
+        // same idea, only set for non-native tables
+        if (!jobSecrets.isEmpty()) {
+          tableDesc.setJobSecrets(jobSecrets);
+        }
       }
     } catch (HiveException ex) {
       throw new RuntimeException(ex);

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index 117aa14..8b7339d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -53,6 +53,7 @@ public class TableDesc implements Serializable, Cloneable {
   private Class<? extends OutputFormat> outputFileFormatClass;
   private java.util.Properties properties;
   private Map<String, String> jobProperties;
+  private Map<String, String> jobSecrets;
 
   public TableDesc() {
   }
@@ -143,6 +144,14 @@ public class TableDesc implements Serializable, Cloneable {
     return jobProperties;
   }
 
+  public void setJobSecrets(Map<String, String> jobSecrets) {
+    this.jobSecrets = jobSecrets;
+  }
+
+  public Map<String, String> getJobSecrets() {
+    return jobSecrets;
+  }
+
   /**
    * @return the serdeClassName
    */

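With jobSecrets now carried on TableDesc, a downstream consumer can move those entries into the job's Credentials object rather than into the plain Configuration key/value space. The snippet below is only a sketch of that idea, not code from this patch set: the helper class and method are invented, while TableDesc.getJobSecrets() (added above) and the Hadoop Credentials/JobConf APIs are existing interfaces.

import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

/** Illustrative helper: copy TableDesc job secrets into the job's Credentials. */
public final class JobSecretsSketch {

  private JobSecretsSketch() {
  }

  public static void addSecretsToJob(TableDesc tableDesc, JobConf job) {
    Map<String, String> secrets = tableDesc.getJobSecrets();
    if (secrets == null || secrets.isEmpty()) {
      return;
    }
    for (Map.Entry<String, String> e : secrets.entrySet()) {
      // Secrets go into the job's credentials, which are handled separately
      // from the plain configuration properties that get serialized with the plan.
      job.getCredentials().addSecretKey(
          new Text(e.getKey()), e.getValue().getBytes(StandardCharsets.UTF_8));
    }
  }
}
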
http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
index ed88725..18f77e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
@@ -97,7 +97,7 @@ public final class OpProcFactory {
   protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class
     .getName());
 
-  private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current, OpWalkerInfo owi) {
+  private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current, OpWalkerInfo owi) throws SemanticException {
     if (current.getNumChild() == 0) {
       return null;
     }
@@ -875,7 +875,7 @@ public final class OpProcFactory {
   }
 
   protected static Object createFilter(Operator op,
-      ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) {
+      ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) throws SemanticException {
     if (pushDownPreds != null && pushDownPreds.hasAnyCandidates()) {
       return createFilter(op, pushDownPreds.getFinalCandidates(), owi);
     }
@@ -883,7 +883,7 @@ public final class OpProcFactory {
   }
 
   protected static Object createFilter(Operator op,
-      Map<String, List<ExprNodeDesc>> predicates, OpWalkerInfo owi) {
+      Map<String, List<ExprNodeDesc>> predicates, OpWalkerInfo owi) throws SemanticException {
     RowSchema inputRS = op.getSchema();
 
     // combine all predicates into a single expression
@@ -970,7 +970,7 @@ public final class OpProcFactory {
     TableScanOperator tableScanOp,
     ExprNodeGenericFuncDesc originalPredicate,
     OpWalkerInfo owi,
-    HiveConf hiveConf) {
+    HiveConf hiveConf) throws SemanticException {
 
     TableScanDesc tableScanDesc = tableScanOp.getConf();
     Table tbl = tableScanDesc.getTableMetadata();
@@ -997,9 +997,15 @@ public final class OpProcFactory {
     JobConf jobConf = new JobConf(owi.getParseContext().getConf());
     Utilities.setColumnNameList(jobConf, tableScanOp);
     Utilities.setColumnTypeList(jobConf, tableScanOp);
-    Utilities.copyTableJobPropertiesToConf(
-      Utilities.getTableDesc(tbl),
-      jobConf);
+
+    try {
+      Utilities.copyTableJobPropertiesToConf(
+        Utilities.getTableDesc(tbl),
+        jobConf);
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+
     Deserializer deserializer = tbl.getDeserializer();
     HiveStoragePredicateHandler.DecomposedPredicate decomposed =
       predicateHandler.decomposePredicate(

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/test/org/apache/hadoop/hive/ql/exec/InputEstimatorTestClass.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/InputEstimatorTestClass.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/InputEstimatorTestClass.java
index 8c52979..f3f4388 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/InputEstimatorTestClass.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/InputEstimatorTestClass.java
@@ -75,6 +75,11 @@ public class InputEstimatorTestClass implements HiveStorageHandler, InputEstimat
   }
 
   @Override
+  public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> jobProperties) {
+
+  }
+
+  @Override
   public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/test/queries/clientpositive/jdbc_handler.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/jdbc_handler.q b/ql/src/test/queries/clientpositive/jdbc_handler.q
index a37e547..847f577 100644
--- a/ql/src/test/queries/clientpositive/jdbc_handler.q
+++ b/ql/src/test/queries/clientpositive/jdbc_handler.q
@@ -9,31 +9,24 @@ owner STRING
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.database.type" = "METASTORE",
 "hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
-"hive.sql.column.mapping" = "id=TBL_ID, db_id=DB_ID, name=TBL_NAME, type=TBL_TYPE, owner=OWNER",
-"hive.sql.dbcp.maxActive" = "1"
+"hive.sql.column.mapping" = "id=TBL_ID, db_id=DB_ID, name=TBL_NAME, type=TBL_TYPE, owner=OWNER"
 );
 
 CREATE EXTERNAL TABLE dbs
 (
-id int,
-name STRING
+DB_ID int,
+NAME STRING
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
-"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
-"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
-"hive.sql.dbcp.maxActive" = "1"
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS"
 );
 
-select tables.name as tn, dbs.name as dn, tables.type as t
-from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t;
+select tables.name as tn, dbs.NAME as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.DB_ID) order by tn, dn, t;
 
 explain
 select

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/test/queries/clientpositive/sysdb.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/sysdb.q b/ql/src/test/queries/clientpositive/sysdb.q
new file mode 100644
index 0000000..d94a164
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/sysdb.q
@@ -0,0 +1,117 @@
+set hive.strict.checks.cartesian.product=false;
+
+set hive.compute.query.using.stats=false;
+
+set hive.support.concurrency=true;
+
+set hive.cbo.enable=false;
+
+create table src_buck (key int, value string) clustered by(value) into 2 buckets;
+
+create table src_skew (key int) skewed by (key) on (1,2,3);
+
+CREATE TABLE scr_txn (key int, value string)
+    CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC
+    TBLPROPERTIES (
+      "transactional"="true",
+      "compactor.mapreduce.map.memory.mb"="2048",
+      "compactorthreshold.hive.compactor.delta.num.threshold"="4",
+      "compactorthreshold.hive.compactor.delta.pct.threshold"="0.5");
+
+CREATE TEMPORARY TABLE src_tmp (key int, value string);
+
+CREATE TABLE moretypes (a decimal(10,2), b tinyint, c smallint, d int, e bigint, f varchar(10), g char(3));
+
+show grant user hive_test_user;
+
+source ../../metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql;
+
+use sys;
+
+select bucket_col_name, integer_idx from bucketing_cols order by bucket_col_name, integer_idx limit 5;
+
+select count(*) from cds;
+
+select column_name, type_name, integer_idx from columns_v2 order by column_name, integer_idx limit 5;
+
+select param_key, param_value from database_params order by param_key, param_value limit 5;
+
+select db_location_uri, name, owner_name, owner_type from dbs order by name;
+
+select grantor, principal_name from db_privs order by grantor, principal_name limit 5;
+
+select grantor, principal_name from global_privs order by grantor, principal_name limit 5;
+
+select index_name, index_handler_class from idxs order by index_name limit 5;
+
+select param_key, param_value from index_params order by param_key, param_value limit 5;
+
+select part_name from partitions order by part_name limit 5;
+
+select pkey_name, pkey_type from partition_keys order by pkey_name limit 5;
+
+select part_key_val, integer_idx from partition_key_vals order by part_key_val, integer_idx limit 5;
+
+select param_key, param_value from partition_params order by param_key, param_value limit 5;
+
+select grantor, principal_name from part_col_privs order by grantor, principal_name limit 5;
+
+select grantor, principal_name from part_privs order by grantor, principal_name limit 5;
+
+select role_name from roles order by role_name limit 5;
+
+select principal_name, grantor from role_map order by principal_name, grantor limit 5;
+
+select count(*) from sds;
+
+select param_key, param_value from sd_params order by param_key, param_value limit 5;
+
+select sequence_name from sequence_table order by sequence_name limit 5;
+
+select name, slib from serdes order by name, slib limit 5;
+
+select param_key, param_value from serde_params order by param_key, param_value limit 5;
+
+select skewed_col_name from skewed_col_names order by skewed_col_name limit 5;
+
+select count(*) from skewed_col_value_loc_map;
+
+select count(*) from skewed_string_list;
+
+select count(*) from skewed_string_list_values;
+
+select count(*) from skewed_values;
+
+select column_name, `order` from sort_cols order by column_name limit 5;
+
+select param_key, param_value from table_params order by param_key, param_value limit 5;
+
+select tbl_name from tbls order by tbl_name limit 5;
+
+select column_name, grantor, principal_name from tbl_col_privs order by column_name, principal_name limit 5;
+
+select grantor, principal_name from tbl_privs order by grantor, principal_name limit 5;
+
+select table_name, column_name, num_nulls, num_distincts from tab_col_stats order by table_name, column_name limit 10;
+
+select table_name, partition_name, column_name, num_nulls, num_distincts from part_col_stats order by table_name, partition_name, column_name limit 10;
+
+select schema_version from version order by schema_version limit 5;
+
+select func_name, func_type from funcs order by func_name, func_type limit 5;
+
+select constraint_name from key_constraints order by constraint_name limit 5;
+
+use INFORMATION_SCHEMA;
+
+select count(*) from SCHEMATA;
+
+select * from TABLES order by TABLE_SCHEMA, TABLE_NAME;
+
+select * from TABLE_PRIVILEGES order by GRANTOR, GRANTEE, TABLE_SCHEMA, TABLE_NAME limit 10;
+
+select * from COLUMNS where TABLE_NAME = 'alltypesorc' or TABLE_NAME = 'moretypes' order by TABLE_SCHEMA, TABLE_NAME, ORDINAL_POSITION ;
+
+select * from COLUMN_PRIVILEGES order by GRANTOR, GRANTEE, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME limit 10;
+
+select TABLE_SCHEMA, TABLE_NAME from views order by TABLE_SCHEMA, TABLE_NAME;

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
index 483b7f9..7c428e8 100644
--- a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -8,12 +8,9 @@ type STRING,
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.database.type" = "METASTORE",
 "hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
 #### A masked pattern was here ####
-"hive.sql.dbcp.maxActive" = "1"
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -28,58 +25,47 @@ type STRING,
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.database.type" = "METASTORE",
 "hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
 #### A masked pattern was here ####
-"hive.sql.dbcp.maxActive" = "1"
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tables
 PREHOOK: query: CREATE EXTERNAL TABLE dbs
 (
-id int,
-name STRING
+DB_ID int,
+NAME STRING
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
-"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
-"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
-"hive.sql.dbcp.maxActive" = "1"
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS"
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@dbs
 POSTHOOK: query: CREATE EXTERNAL TABLE dbs
 (
-id int,
-name STRING
+DB_ID int,
+NAME STRING
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
 TBLPROPERTIES (
-"hive.sql.database.type" = "DERBY",
-"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
-"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
-"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
-"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
-"hive.sql.dbcp.maxActive" = "1"
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS"
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dbs
-PREHOOK: query: select tables.name as tn, dbs.name as dn, tables.type as t
-from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t
+PREHOOK: query: select tables.name as tn, dbs.NAME as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.DB_ID) order by tn, dn, t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dbs
 PREHOOK: Input: default@tables
 #### A masked pattern was here ####
-POSTHOOK: query: select tables.name as tn, dbs.name as dn, tables.type as t
-from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t
+POSTHOOK: query: select tables.name as tn, dbs.NAME as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.DB_ID) order by tn, dn, t
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dbs
 POSTHOOK: Input: default@tables


[43/50] [abbrv] hive git commit: HIVE-16618: Clean up javadoc from errors in module hive-common (Janos Gub via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-16618: Clean up javadoc from errors in module hive-common (Janos Gub via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/202c5137
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/202c5137
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/202c5137

Branch: refs/heads/hive-14535
Commit: 202c5137f2b73ead25d6907d1f74a8857690440e
Parents: 7827316
Author: Janos Gub <gu...@gmail.com>
Authored: Tue May 16 08:27:39 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:27:39 2017 +0200

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/common/CompressionUtils.java |  8 ++++----
 .../java/org/apache/hadoop/hive/common/JavaUtils.java   |  4 ++--
 .../org/apache/hadoop/hive/common/StatsSetupConst.java  |  6 +++---
 .../hadoop/hive/common/ValidCompactorTxnList.java       |  4 ++--
 .../hive/common/classification/RetrySemantics.java      |  1 -
 .../apache/hadoop/hive/common/cli/CommonCliOptions.java |  4 ++--
 .../hive/common/metrics/common/MetricsVariable.java     |  2 +-
 .../org/apache/hadoop/hive/common/type/Decimal128.java  | 12 +++++-------
 .../java/org/apache/hadoop/hive/conf/HiveConfUtil.java  |  3 ++-
 .../org/apache/hive/common/util/HiveStringUtils.java    |  4 ++--
 .../apache/hive/common/util/ShutdownHookManager.java    |  2 +-
 common/src/java/org/apache/hive/http/HttpServer.java    |  6 ++++--
 .../src/java/org/apache/hive/http/JMXJsonServlet.java   |  6 +++---
 13 files changed, 31 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
index d26207d..c4f2297 100644
--- a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
@@ -112,8 +112,8 @@ public class CompressionUtils {
    * The output file is created in the output folder, having the same name as the input file, minus
    * the '.tar' extension.
    *
-   * @param inputFile the input .tar file
-   * @param outputDir the output directory file.
+   * @param inputFileName the input .tar file
+   * @param outputDirName the output directory file.
    * @throws IOException
    * @throws FileNotFoundException
    *
@@ -131,8 +131,8 @@ public class CompressionUtils {
    * The output file is created in the output folder, having the same name as the input file, minus
    * the '.tar' extension.
    *
-   * @param inputFile the input .tar file
-   * @param outputDir the output directory file.
+   * @param inputFileName the input .tar file
+   * @param outputDirName the output directory file.
    * @throws IOException
    * @throws FileNotFoundException
    *

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 3916fe3..b224d26 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -138,14 +138,14 @@ public final class JavaUtils {
 
   /**
    * Utility method for ACID to normalize logging info.  Matches
-   * {@link org.apache.hadoop.hive.metastore.api.LockRequest#toString()}
+   * org.apache.hadoop.hive.metastore.api.LockRequest#toString
    */
   public static String lockIdToString(long extLockId) {
     return "lockid:" + extLockId;
   }
   /**
    * Utility method for ACID to normalize logging info.  Matches
-   * {@link org.apache.hadoop.hive.metastore.api.LockResponse#toString()}
+   * org.apache.hadoop.hive.metastore.api.LockResponse#toString
    */
   public static String txnIdToString(long txnId) {
     return "txnid:" + txnId;

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index a9e17c2..2387407 100644
--- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -110,18 +110,18 @@ public class StatsSetupConst {
 
   public static final String STATS_FILE_PREFIX = "tmpstats-";
   /**
-   * @return List of all supported statistics
+   * List of all supported statistics
    */
   public static final String[] supportedStats = {NUM_FILES,ROW_COUNT,TOTAL_SIZE,RAW_DATA_SIZE};
 
   /**
-   * @return List of all statistics that need to be collected during query execution. These are
+   * List of all statistics that need to be collected during query execution. These are
    * statistics that inherently require a scan of the data.
    */
   public static final String[] statsRequireCompute = new String[] {ROW_COUNT,RAW_DATA_SIZE};
 
   /**
-   * @return List of statistics that can be collected quickly without requiring a scan of the data.
+   * List of statistics that can be collected quickly without requiring a scan of the data.
    */
   public static final String[] fastStats = new String[] {NUM_FILES,TOTAL_SIZE};
 

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java b/common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
index 8f55354..c022577 100644
--- a/common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
+++ b/common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
@@ -32,7 +32,7 @@ import java.util.BitSet;
  * open transaction when choosing which files to compact, but that it still ignores aborted
  * records when compacting.
  * 
- * See {@link org.apache.hadoop.hive.metastore.txn.TxnUtils#createValidCompactTxnList()} for proper
+ * See org.apache.hadoop.hive.metastore.txn.TxnUtils#createValidCompactTxnList() for proper
  * way to construct this.
  */
 public class ValidCompactorTxnList extends ValidReadTxnList {
@@ -70,7 +70,7 @@ public class ValidCompactorTxnList extends ValidReadTxnList {
     super(value);
   }
   /**
-   * Returns {@link org.apache.hadoop.hive.common.ValidTxnList.RangeResponse.ALL} if all txns in
+   * Returns org.apache.hadoop.hive.common.ValidTxnList.RangeResponse.ALL if all txns in
    * the range are resolved and RangeResponse.NONE otherwise
    */
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java b/common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
index 5883b01..f1c3946 100644
--- a/common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
+++ b/common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
@@ -28,7 +28,6 @@ import java.lang.annotation.Target;
  * Initially meant for Metastore API when made across a network, i.e. asynchronously where
  * the response may not reach the caller and thus it cannot know if the operation was actually
  * performed on the server.
- * @see RetryingMetastoreClient
  */
 @InterfaceStability.Evolving
 @InterfaceAudience.LimitedPrivate("Hive developer")

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java b/common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
index 5a991ec..cc2ca6e 100644
--- a/common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
+++ b/common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
@@ -35,8 +35,8 @@ import org.apache.logging.log4j.Level;
  * all your own options or processing instructions), parse, and then use
  * the resulting information.
  * <p>
- * See {@link org.apache.hadoop.hive.service.HiveServer} or
- *     {@link org.apache.hadoop.hive.metastore.HiveMetaStore}
+ * See org.apache.hadoop.hive.service.HiveServer or
+ *     org.apache.hadoop.hive.metastore.HiveMetaStore
  *     for examples of use.
  *
  */

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
index 8cf6608..7fd8eda 100644
--- a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
+++ b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hive.common.metrics.common;
 
 /**
- * Interface for metrics variables. <p/> For example a the database service could expose the number of
+ * Interface for metrics variables. For example a the database service could expose the number of
  * currently active connections.
  */
 public interface MetricsVariable<T> {

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java b/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
index 3b3e918..9face96 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
@@ -1316,15 +1316,13 @@ public final class Decimal128 extends Number implements Comparable<Decimal128> {
     *   x - IntegerPart(x / p, resultScale) * p
     * </p>
     *
-    * @left
+    * @param left
     *    is x
-    * @right
+    * @param right
     *    is p
-    * @result
+    * @param result
     *    receives the result
-    * @scratch
-    *    scratch space to avoid need to create a new object
-    * @scale
+    * @param scale
     *    scale of result
     */
    public static void modulo(Decimal128 left, Decimal128 right, Decimal128 result,
@@ -1856,7 +1854,7 @@ public final class Decimal128 extends Number implements Comparable<Decimal128> {
   /**
    * Update the value to a decimal value with the decimal point equal to
    * val but with the decimal point inserted scale
-   * digits from the right. Behavior is undefined if scale is > 38 or < 0.
+   * digits from the right. Behavior is undefined if scale is &gt; 38 or &lt; 0.
    *
    * For example, updateFixedPoint(123456789L, (short) 3) changes the target
    * to the value 123456.789 with scale 3.

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
index dc02803..9084fed 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -151,6 +151,7 @@ public class HiveConfUtil {
    * password is through a file which stores the password in clear-text which needs to be readable
    * by all the consumers and therefore is not supported.
    *
+   *<ul>
    * <li>If HIVE_SERVER2_JOB_CREDENTIAL_PROVIDER_PATH is set in the hive configuration this method
    * overrides the MR job configuration property hadoop.security.credential.provider.path with its
    * value. If not set then it does not change the value of hadoop.security.credential.provider.path
@@ -161,7 +162,7 @@ public class HiveConfUtil {
    *   (2) If password is not set using (1) above we use HADOOP_CREDSTORE_PASSWORD if it is set.
    *   (3) If none of those are set, we do not set any password in the MR task environment. In this
    *       case the hadoop credential provider should use the default password of "none" automatically
-   *
+   *</ul>
    * @param jobConf - job specific configuration
    */
   public static void updateJobCredentialProviders(Configuration jobConf) {

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index f6dc73a..4a6413a 100644
--- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -426,7 +426,7 @@ public class HiveStringUtils {
 
   /**
    * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
-   * @param str a comma separated <String> with values
+   * @param str a comma separated <code>String</code> with values
    * @return a <code>Collection</code> of <code>String</code> values
    */
   public static Collection<String> getTrimmedStringCollection(String str){
@@ -436,7 +436,7 @@ public class HiveStringUtils {
 
   /**
    * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
-   * @param str a comma separated <String> with values
+   * @param str a comma separated <code>String</code> with values
    * @return an array of <code>String</code> values
    */
   public static String[] getTrimmedStrings(String str){

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
index 6585e3b..0b11d10 100644
--- a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
+++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
@@ -89,7 +89,7 @@ public class ShutdownHookManager {
   /**
    * register file to delete-on-exit hook
    *
-   * @see {@link org.apache.hadoop.hive.common.FileUtils#createTempFile}
+   * {@link org.apache.hadoop.hive.common.FileUtils#createTempFile}
    */
   public static void deleteOnExit(File file) {
     if (MGR.isShutdownInProgress()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hive/http/HttpServer.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/http/HttpServer.java b/common/src/java/org/apache/hive/http/HttpServer.java
index fd3d457..0bc0032 100644
--- a/common/src/java/org/apache/hive/http/HttpServer.java
+++ b/common/src/java/org/apache/hive/http/HttpServer.java
@@ -216,13 +216,15 @@ public class HttpServer {
 
   /**
    * Checks the user has privileges to access to instrumentation servlets.
-   * <p/>
+   * <p>
    * If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
    * (default value) it always returns TRUE.
-   * <p/>
+   * </p>
+   * <p>
    * If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
    * it will check if the current user is in the admin ACLS. If the user is
    * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
+   * </p>
    *
    * @param servletContext the servlet context.
    * @param request the servlet request.

http://git-wip-us.apache.org/repos/asf/hive/blob/202c5137/common/src/java/org/apache/hive/http/JMXJsonServlet.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/http/JMXJsonServlet.java b/common/src/java/org/apache/hive/http/JMXJsonServlet.java
index 7535b26..219db53 100644
--- a/common/src/java/org/apache/hive/http/JMXJsonServlet.java
+++ b/common/src/java/org/apache/hive/http/JMXJsonServlet.java
@@ -70,7 +70,7 @@ import org.codehaus.jackson.JsonGenerator;
  * <p>
  * The optional <code>get</code> parameter is used to query a specific
  * attribute of a JMX bean.  The format of the URL is
- * <code>http://.../jmx?get=MXBeanName::AttributeName<code>
+ * <code>http://.../jmx?get=MXBeanName::AttributeName</code>
  * <p>
  * For example 
  * <code>
@@ -85,7 +85,7 @@ import org.codehaus.jackson.JsonGenerator;
  * <p>
  * The return format is JSON and in the form
  * <p>
- *  <code><pre>
+ *  <code>
  *  {
  *    "beans" : [
  *      {
@@ -94,7 +94,7 @@ import org.codehaus.jackson.JsonGenerator;
  *      }
  *    ]
  *  }
- *  </pre></code>
+ *  </code>
  *  <p>
  *  The servlet attempts to convert the JMXBeans into JSON. Each
  *  bean's attributes will be converted to a JSON object member.


[17/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query16.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query16.q.out b/ql/src/test/results/clientpositive/perf/query16.q.out
index 05b1871..cf90c0c 100644
--- a/ql/src/test/results/clientpositive/perf/query16.q.out
+++ b/ql/src/test/results/clientpositive/perf/query16.q.out
@@ -1,4 +1,4 @@
-Warning: Shuffle Join MERGEJOIN[106][tables = [$hdt$_2, $hdt$_3, $hdt$_1, $hdt$_4]] in Stage 'Reducer 18' is a cross product
+Warning: Shuffle Join MERGEJOIN[106][tables = [$hdt$_2, $hdt$_3, $hdt$_1, $hdt$_4]] in Stage 'Reducer 17' is a cross product
 PREHOOK: query: explain select  
    count(distinct cs_order_number) as `order count`
   ,sum(cs_ext_ship_cost) as `total shipping cost`
@@ -61,10 +61,10 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 13 <- Map 12 (SIMPLE_EDGE)
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 15 <- Map 14 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
 Reducer 16 <- Reducer 15 (SIMPLE_EDGE)
-Reducer 18 <- Map 17 (CUSTOM_SIMPLE_EDGE), Map 20 (CUSTOM_SIMPLE_EDGE), Map 21 (CUSTOM_SIMPLE_EDGE), Map 22 (CUSTOM_SIMPLE_EDGE)
-Reducer 19 <- Reducer 18 (SIMPLE_EDGE)
+Reducer 17 <- Map 14 (CUSTOM_SIMPLE_EDGE), Map 19 (CUSTOM_SIMPLE_EDGE), Map 20 (CUSTOM_SIMPLE_EDGE), Map 21 (CUSTOM_SIMPLE_EDGE)
+Reducer 18 <- Reducer 17 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
 Reducer 3 <- Map 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
@@ -116,27 +116,32 @@ Stage-0
                                         Merge Join Operator [MERGEJOIN_110] (rows=5072854730221289472 width=1)
                                           Conds:RS_39._col1=RS_40._col1(Inner),Output:["_col0","_col2","_col3"]
                                         <-Map 14 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_39]
+                                          PARTITION_ONLY_SHUFFLE [RS_39]
                                             PartitionCols:_col1
                                             Select Operator [SEL_20] (rows=287989836 width=135)
                                               Output:["_col0","_col1"]
                                               TableScan [TS_19] (rows=287989836 width=135)
                                                 default@catalog_sales,cs2,Tbl:COMPLETE,Col:NONE,Output:["cs_warehouse_sk","cs_order_number"]
-                                        <-Reducer 19 [SIMPLE_EDGE]
+                                        <-Reducer 18 [SIMPLE_EDGE]
                                           SHUFFLE [RS_40]
                                             PartitionCols:_col1
                                             Select Operator [SEL_38] (rows=4611686018427387903 width=1)
                                               Output:["_col0","_col1"]
                                               Group By Operator [GBY_37] (rows=4611686018427387903 width=1)
                                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                              <-Reducer 18 [SIMPLE_EDGE]
+                                              <-Reducer 17 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_36]
                                                   PartitionCols:_col0, _col1
                                                   Group By Operator [GBY_35] (rows=9223372036854775807 width=1)
                                                     Output:["_col0","_col1"],keys:_col4, _col3
                                                     Merge Join Operator [MERGEJOIN_106] (rows=9223372036854775807 width=1)
                                                       Conds:(Inner),(Inner),(Inner),Output:["_col3","_col4"]
-                                                    <-Map 17 [CUSTOM_SIMPLE_EDGE]
+                                                    <-Map 14 [CUSTOM_SIMPLE_EDGE]
+                                                      PARTITION_ONLY_SHUFFLE [RS_32]
+                                                        Select Operator [SEL_28] (rows=287989836 width=135)
+                                                          Output:["_col0","_col1"]
+                                                           Please refer to the previous TableScan [TS_19]
+                                                    <-Map 19 [CUSTOM_SIMPLE_EDGE]
                                                       PARTITION_ONLY_SHUFFLE [RS_29]
                                                         Select Operator [SEL_22] (rows=73049 width=4)
                                                           TableScan [TS_21] (rows=73049 width=1119)
@@ -151,12 +156,6 @@ Stage-0
                                                         Select Operator [SEL_26] (rows=40000000 width=4)
                                                           TableScan [TS_25] (rows=40000000 width=1014)
                                                             default@customer_address,customer_address,Tbl:COMPLETE,Col:COMPLETE
-                                                    <-Map 22 [CUSTOM_SIMPLE_EDGE]
-                                                      PARTITION_ONLY_SHUFFLE [RS_32]
-                                                        Select Operator [SEL_28] (rows=287989836 width=135)
-                                                          Output:["_col0","_col1"]
-                                                          TableScan [TS_27] (rows=287989836 width=135)
-                                                            default@catalog_sales,cs1,Tbl:COMPLETE,Col:NONE,Output:["cs_warehouse_sk","cs_order_number"]
                           <-Reducer 5 [SIMPLE_EDGE]
                             SHUFFLE [RS_60]
                               PartitionCols:_col3, _col4

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query17.q.out b/ql/src/test/results/clientpositive/perf/query17.q.out
index 651ba57..2f32a9f 100644
--- a/ql/src/test/results/clientpositive/perf/query17.q.out
+++ b/ql/src/test/results/clientpositive/perf/query17.q.out
@@ -6,14 +6,14 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 17 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 15 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 9 <- Map 13 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 9 <- Map 7 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -38,7 +38,7 @@ Stage-0
                       Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],aggregations:["count(_col15)","avg(_col15)","stddev_samp(_col15)","count(_col20)","avg(_col20)","stddev_samp(_col20)","count(_col3)","avg(_col3)","stddev_samp(_col3)"],keys:_col24, _col25, _col7
                       Merge Join Operator [MERGEJOIN_100] (rows=1020411534 width=88)
                         Conds:RS_43._col11=RS_44._col0(Inner),Output:["_col3","_col7","_col15","_col20","_col24","_col25"]
-                      <-Map 17 [SIMPLE_EDGE]
+                      <-Map 15 [SIMPLE_EDGE]
                         SHUFFLE [RS_44]
                           PartitionCols:_col0
                           Select Operator [SEL_36] (rows=462000 width=1436)
@@ -52,14 +52,14 @@ Stage-0
                           PartitionCols:_col11
                           Merge Join Operator [MERGEJOIN_99] (rows=927646829 width=88)
                             Conds:RS_40._col1, _col2=RS_41._col12, _col11(Inner),Output:["_col3","_col7","_col11","_col15","_col20"]
-                          <-Reducer 12 [SIMPLE_EDGE]
+                          <-Reducer 10 [SIMPLE_EDGE]
                             SHUFFLE [RS_41]
                               PartitionCols:_col12, _col11
                               Select Operator [SEL_33] (rows=843315281 width=88)
                                 Output:["_col1","_col5","_col9","_col11","_col12","_col14"]
                                 Merge Join Operator [MERGEJOIN_98] (rows=843315281 width=88)
                                   Conds:RS_30._col3=RS_31._col0(Inner),Output:["_col1","_col5","_col7","_col8","_col10","_col16"]
-                                <-Map 16 [SIMPLE_EDGE]
+                                <-Map 14 [SIMPLE_EDGE]
                                   SHUFFLE [RS_31]
                                     PartitionCols:_col0
                                     Select Operator [SEL_20] (rows=1704 width=1910)
@@ -68,39 +68,47 @@ Stage-0
                                         predicate:s_store_sk is not null
                                         TableScan [TS_18] (rows=1704 width=1910)
                                           default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_state"]
-                                <-Reducer 11 [SIMPLE_EDGE]
+                                <-Reducer 9 [SIMPLE_EDGE]
                                   SHUFFLE [RS_30]
                                     PartitionCols:_col3
                                     Merge Join Operator [MERGEJOIN_97] (rows=766650239 width=88)
                                       Conds:RS_27._col6=RS_28._col0(Inner),Output:["_col1","_col3","_col5","_col7","_col8","_col10"]
-                                    <-Map 15 [SIMPLE_EDGE]
+                                    <-Map 7 [SIMPLE_EDGE]
                                       SHUFFLE [RS_28]
                                         PartitionCols:_col0
                                         Select Operator [SEL_17] (rows=36525 width=1119)
                                           Output:["_col0"]
                                           Filter Operator [FIL_91] (rows=36525 width=1119)
                                             predicate:((d_quarter_name) IN ('2000Q1', '2000Q2', '2000Q3') and d_date_sk is not null)
-                                            TableScan [TS_15] (rows=73049 width=1119)
-                                              default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_quarter_name"]
-                                    <-Reducer 10 [SIMPLE_EDGE]
+                                            TableScan [TS_3] (rows=73049 width=1119)
+                                              default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_quarter_name"]
+                                    <-Reducer 8 [SIMPLE_EDGE]
                                       SHUFFLE [RS_27]
                                         PartitionCols:_col6
                                         Merge Join Operator [MERGEJOIN_96] (rows=696954748 width=88)
                                           Conds:RS_24._col0=RS_25._col0(Inner),Output:["_col1","_col3","_col5","_col6","_col7","_col8","_col10"]
-                                        <-Map 14 [SIMPLE_EDGE]
+                                        <-Map 7 [SIMPLE_EDGE]
                                           SHUFFLE [RS_25]
                                             PartitionCols:_col0
                                             Select Operator [SEL_14] (rows=36524 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_90] (rows=36524 width=1119)
                                                 predicate:((d_quarter_name = '2000Q1') and d_date_sk is not null)
-                                                TableScan [TS_12] (rows=73049 width=1119)
-                                                  default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_quarter_name"]
-                                        <-Reducer 9 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Reducer 12 [SIMPLE_EDGE]
                                           SHUFFLE [RS_24]
                                             PartitionCols:_col0
                                             Merge Join Operator [MERGEJOIN_95] (rows=633595212 width=88)
                                               Conds:RS_21._col1, _col2, _col4=RS_22._col1, _col2, _col3(Inner),Output:["_col0","_col1","_col3","_col5","_col6","_col7","_col8","_col10"]
+                                            <-Map 11 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_21]
+                                                PartitionCols:_col1, _col2, _col4
+                                                Select Operator [SEL_8] (rows=575995635 width=88)
+                                                  Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
+                                                  Filter Operator [FIL_88] (rows=575995635 width=88)
+                                                    predicate:(ss_item_sk is not null and ss_customer_sk is not null and ss_ticket_number is not null and ss_sold_date_sk is not null and ss_store_sk is not null)
+                                                    TableScan [TS_6] (rows=575995635 width=88)
+                                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_store_sk","ss_ticket_number","ss_quantity"]
                                             <-Map 13 [SIMPLE_EDGE]
                                               SHUFFLE [RS_22]
                                                 PartitionCols:_col1, _col2, _col3
@@ -110,20 +118,19 @@ Stage-0
                                                     predicate:(sr_item_sk is not null and sr_customer_sk is not null and sr_ticket_number is not null and sr_returned_date_sk is not null)
                                                     TableScan [TS_9] (rows=57591150 width=77)
                                                       default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_item_sk","sr_customer_sk","sr_ticket_number","sr_return_quantity"]
-                                            <-Map 8 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_21]
-                                                PartitionCols:_col1, _col2, _col4
-                                                Select Operator [SEL_8] (rows=575995635 width=88)
-                                                  Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                                                  Filter Operator [FIL_88] (rows=575995635 width=88)
-                                                    predicate:(ss_item_sk is not null and ss_customer_sk is not null and ss_ticket_number is not null and ss_sold_date_sk is not null and ss_store_sk is not null)
-                                                    TableScan [TS_6] (rows=575995635 width=88)
-                                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_store_sk","ss_ticket_number","ss_quantity"]
                           <-Reducer 2 [SIMPLE_EDGE]
                             SHUFFLE [RS_40]
                               PartitionCols:_col1, _col2
                               Merge Join Operator [MERGEJOIN_94] (rows=316788826 width=135)
                                 Conds:RS_37._col0=RS_38._col0(Inner),Output:["_col1","_col2","_col3"]
+                              <-Map 7 [SIMPLE_EDGE]
+                                SHUFFLE [RS_38]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_5] (rows=36525 width=1119)
+                                    Output:["_col0"]
+                                    Filter Operator [FIL_87] (rows=36525 width=1119)
+                                      predicate:((d_quarter_name) IN ('2000Q1', '2000Q2', '2000Q3') and d_date_sk is not null)
+                                       Please refer to the previous TableScan [TS_3]
                               <-Map 1 [SIMPLE_EDGE]
                                 SHUFFLE [RS_37]
                                   PartitionCols:_col0
@@ -133,13 +140,4 @@ Stage-0
                                       predicate:(cs_bill_customer_sk is not null and cs_item_sk is not null and cs_sold_date_sk is not null)
                                       TableScan [TS_0] (rows=287989836 width=135)
                                         default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_customer_sk","cs_item_sk","cs_quantity"]
-                              <-Map 7 [SIMPLE_EDGE]
-                                SHUFFLE [RS_38]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_5] (rows=36525 width=1119)
-                                    Output:["_col0"]
-                                    Filter Operator [FIL_87] (rows=36525 width=1119)
-                                      predicate:((d_quarter_name) IN ('2000Q1', '2000Q2', '2000Q3') and d_date_sk is not null)
-                                      TableScan [TS_3] (rows=73049 width=1119)
-                                        default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_quarter_name"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query23.q.out b/ql/src/test/results/clientpositive/perf/query23.q.out
index 7a3201e..5794301 100644
--- a/ql/src/test/results/clientpositive/perf/query23.q.out
+++ b/ql/src/test/results/clientpositive/perf/query23.q.out
@@ -1,5 +1,5 @@
-Warning: Shuffle Join MERGEJOIN[369][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 49' is a cross product
-Warning: Shuffle Join MERGEJOIN[367][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 19' is a cross product
+Warning: Shuffle Join MERGEJOIN[369][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 30' is a cross product
+Warning: Shuffle Join MERGEJOIN[367][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 25' is a cross product
 PREHOOK: query: explain with frequent_ss_items as 
  (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt
   from store_sales
@@ -103,41 +103,41 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 13 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
-Reducer 16 <- Map 21 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 30 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 18 (SIMPLE_EDGE)
+Reducer 13 <- Map 19 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 14 <- Reducer 13 (SIMPLE_EDGE)
+Reducer 15 <- Map 11 (SIMPLE_EDGE), Map 18 (SIMPLE_EDGE)
+Reducer 16 <- Map 19 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
 Reducer 17 <- Reducer 16 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (CUSTOM_SIMPLE_EDGE)
-Reducer 19 <- Reducer 18 (CUSTOM_SIMPLE_EDGE), Reducer 26 (CUSTOM_SIMPLE_EDGE), Reducer 31 (CUSTOM_SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 23 <- Map 22 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE)
-Reducer 24 <- Map 28 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
-Reducer 25 <- Reducer 24 (SIMPLE_EDGE)
-Reducer 26 <- Reducer 25 (CUSTOM_SIMPLE_EDGE)
-Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 30 <- Map 29 (SIMPLE_EDGE), Map 32 (SIMPLE_EDGE)
-Reducer 31 <- Reducer 30 (SIMPLE_EDGE)
-Reducer 34 <- Map 33 (SIMPLE_EDGE), Map 37 (SIMPLE_EDGE)
-Reducer 35 <- Reducer 34 (SIMPLE_EDGE), Reducer 41 (SIMPLE_EDGE)
-Reducer 36 <- Reducer 35 (SIMPLE_EDGE), Reducer 49 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 39 <- Map 38 (SIMPLE_EDGE), Map 42 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 19 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 40 <- Map 43 (SIMPLE_EDGE), Reducer 39 (SIMPLE_EDGE)
-Reducer 41 <- Reducer 40 (SIMPLE_EDGE)
-Reducer 45 <- Map 44 (SIMPLE_EDGE), Map 50 (SIMPLE_EDGE)
-Reducer 46 <- Map 51 (SIMPLE_EDGE), Reducer 45 (SIMPLE_EDGE)
-Reducer 47 <- Reducer 46 (SIMPLE_EDGE)
-Reducer 48 <- Reducer 47 (CUSTOM_SIMPLE_EDGE)
-Reducer 49 <- Reducer 48 (CUSTOM_SIMPLE_EDGE), Reducer 56 (CUSTOM_SIMPLE_EDGE), Reducer 61 (CUSTOM_SIMPLE_EDGE)
-Reducer 53 <- Map 52 (SIMPLE_EDGE), Map 57 (SIMPLE_EDGE)
-Reducer 54 <- Map 58 (SIMPLE_EDGE), Reducer 53 (SIMPLE_EDGE)
-Reducer 55 <- Reducer 54 (SIMPLE_EDGE)
-Reducer 56 <- Reducer 55 (CUSTOM_SIMPLE_EDGE)
+Reducer 21 <- Map 20 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
+Reducer 22 <- Map 40 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 23 <- Reducer 22 (SIMPLE_EDGE)
+Reducer 24 <- Reducer 23 (CUSTOM_SIMPLE_EDGE)
+Reducer 25 <- Reducer 24 (CUSTOM_SIMPLE_EDGE), Reducer 35 (CUSTOM_SIMPLE_EDGE), Reducer 42 (CUSTOM_SIMPLE_EDGE)
+Reducer 26 <- Map 20 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
+Reducer 27 <- Map 40 (SIMPLE_EDGE), Reducer 26 (SIMPLE_EDGE)
+Reducer 28 <- Reducer 27 (SIMPLE_EDGE)
+Reducer 29 <- Reducer 28 (CUSTOM_SIMPLE_EDGE)
+Reducer 3 <- Reducer 14 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 30 <- Reducer 29 (CUSTOM_SIMPLE_EDGE), Reducer 39 (CUSTOM_SIMPLE_EDGE), Reducer 44 (CUSTOM_SIMPLE_EDGE)
+Reducer 32 <- Map 31 (SIMPLE_EDGE), Map 45 (SIMPLE_EDGE)
+Reducer 33 <- Map 40 (SIMPLE_EDGE), Reducer 32 (SIMPLE_EDGE)
+Reducer 34 <- Reducer 33 (SIMPLE_EDGE)
+Reducer 35 <- Reducer 34 (CUSTOM_SIMPLE_EDGE)
+Reducer 36 <- Map 31 (SIMPLE_EDGE), Map 45 (SIMPLE_EDGE)
+Reducer 37 <- Map 40 (SIMPLE_EDGE), Reducer 36 (SIMPLE_EDGE)
+Reducer 38 <- Reducer 37 (SIMPLE_EDGE)
+Reducer 39 <- Reducer 38 (CUSTOM_SIMPLE_EDGE)
+Reducer 4 <- Reducer 25 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 41 <- Map 40 (SIMPLE_EDGE), Map 46 (SIMPLE_EDGE)
+Reducer 42 <- Reducer 41 (SIMPLE_EDGE)
+Reducer 43 <- Map 40 (SIMPLE_EDGE), Map 46 (SIMPLE_EDGE)
+Reducer 44 <- Reducer 43 (SIMPLE_EDGE)
 Reducer 6 <- Union 5 (CUSTOM_SIMPLE_EDGE)
-Reducer 60 <- Map 59 (SIMPLE_EDGE), Map 62 (SIMPLE_EDGE)
-Reducer 61 <- Reducer 60 (SIMPLE_EDGE)
-Reducer 9 <- Map 12 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 47 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 17 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -150,7 +150,7 @@ Stage-0
           Group By Operator [GBY_243] (rows=1 width=112)
             Output:["_col0"],aggregations:["sum(VALUE._col0)"]
           <-Union 5 [CUSTOM_SIMPLE_EDGE]
-            <-Reducer 36 [CONTAINS]
+            <-Reducer 10 [CONTAINS]
               Reduce Output Operator [RS_242]
                 Group By Operator [GBY_241] (rows=1 width=112)
                   Output:["_col0"],aggregations:["sum(_col0)"]
@@ -158,89 +158,7 @@ Stage-0
                     Output:["_col0"]
                     Merge Join Operator [MERGEJOIN_370] (rows=191667562 width=135)
                       Conds:RS_234._col2=RS_235._col0(Inner),Output:["_col3","_col4"]
-                    <-Reducer 35 [SIMPLE_EDGE]
-                      SHUFFLE [RS_234]
-                        PartitionCols:_col2
-                        Merge Join Operator [MERGEJOIN_366] (rows=174243235 width=135)
-                          Conds:RS_231._col1=RS_232._col0(Inner),Output:["_col2","_col3","_col4"]
-                        <-Reducer 34 [SIMPLE_EDGE]
-                          SHUFFLE [RS_231]
-                            PartitionCols:_col1
-                            Merge Join Operator [MERGEJOIN_357] (rows=158402938 width=135)
-                              Conds:RS_228._col0=RS_229._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                            <-Map 33 [SIMPLE_EDGE]
-                              SHUFFLE [RS_228]
-                                PartitionCols:_col0
-                                Select Operator [SEL_121] (rows=144002668 width=135)
-                                  Output:["_col0","_col1","_col2","_col3","_col4"]
-                                  Filter Operator [FIL_334] (rows=144002668 width=135)
-                                    predicate:(ws_item_sk is not null and ws_bill_customer_sk is not null and ws_sold_date_sk is not null)
-                                    TableScan [TS_119] (rows=144002668 width=135)
-                                      default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_customer_sk","ws_quantity","ws_list_price"]
-                            <-Map 37 [SIMPLE_EDGE]
-                              SHUFFLE [RS_229]
-                                PartitionCols:_col0
-                                Select Operator [SEL_124] (rows=18262 width=1119)
-                                  Output:["_col0"]
-                                  Filter Operator [FIL_335] (rows=18262 width=1119)
-                                    predicate:((d_year = 1999) and (d_moy = 1) and d_date_sk is not null)
-                                    TableScan [TS_122] (rows=73049 width=1119)
-                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                        <-Reducer 41 [SIMPLE_EDGE]
-                          SHUFFLE [RS_232]
-                            PartitionCols:_col0
-                            Group By Operator [GBY_150] (rows=58079562 width=88)
-                              Output:["_col0"],keys:_col1
-                              Select Operator [SEL_146] (rows=116159124 width=88)
-                                Output:["_col1"]
-                                Filter Operator [FIL_145] (rows=116159124 width=88)
-                                  predicate:(_col3 > 4)
-                                  Select Operator [SEL_348] (rows=348477374 width=88)
-                                    Output:["_col0","_col3"]
-                                    Group By Operator [GBY_144] (rows=348477374 width=88)
-                                      Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                    <-Reducer 40 [SIMPLE_EDGE]
-                                      SHUFFLE [RS_143]
-                                        PartitionCols:_col0
-                                        Group By Operator [GBY_142] (rows=696954748 width=88)
-                                          Output:["_col0","_col1","_col2","_col3"],aggregations:["count()"],keys:_col1, _col0, _col2
-                                          Select Operator [SEL_140] (rows=696954748 width=88)
-                                            Output:["_col0","_col1","_col2"]
-                                            Merge Join Operator [MERGEJOIN_359] (rows=696954748 width=88)
-                                              Conds:RS_137._col1=RS_138._col0(Inner),Output:["_col3","_col5","_col6"]
-                                            <-Map 43 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_138]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_133] (rows=462000 width=1436)
-                                                  Output:["_col0","_col1"]
-                                                  Filter Operator [FIL_338] (rows=462000 width=1436)
-                                                    predicate:i_item_sk is not null
-                                                    TableScan [TS_131] (rows=462000 width=1436)
-                                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_desc"]
-                                            <-Reducer 39 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_137]
-                                                PartitionCols:_col1
-                                                Merge Join Operator [MERGEJOIN_358] (rows=633595212 width=88)
-                                                  Conds:RS_134._col0=RS_135._col0(Inner),Output:["_col1","_col3"]
-                                                <-Map 38 [SIMPLE_EDGE]
-                                                  SHUFFLE [RS_134]
-                                                    PartitionCols:_col0
-                                                    Select Operator [SEL_127] (rows=575995635 width=88)
-                                                      Output:["_col0","_col1"]
-                                                      Filter Operator [FIL_336] (rows=575995635 width=88)
-                                                        predicate:(ss_sold_date_sk is not null and ss_item_sk is not null)
-                                                        TableScan [TS_125] (rows=575995635 width=88)
-                                                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk"]
-                                                <-Map 42 [SIMPLE_EDGE]
-                                                  SHUFFLE [RS_135]
-                                                    PartitionCols:_col0
-                                                    Select Operator [SEL_130] (rows=36525 width=1119)
-                                                      Output:["_col0","_col1"]
-                                                      Filter Operator [FIL_337] (rows=36525 width=1119)
-                                                        predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                        TableScan [TS_128] (rows=73049 width=1119)
-                                                          default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_year"]
-                    <-Reducer 49 [SIMPLE_EDGE]
+                    <-Reducer 30 [SIMPLE_EDGE]
                       SHUFFLE [RS_235]
                         PartitionCols:_col0
                         Select Operator [SEL_227] (rows=105599202 width=321)
@@ -249,21 +167,21 @@ Stage-0
                             predicate:(_col3 > (0.95 * _col1))
                             Merge Join Operator [MERGEJOIN_369] (rows=316797606 width=321)
                               Conds:(Inner),(Inner),Output:["_col1","_col2","_col3"]
-                            <-Reducer 48 [CUSTOM_SIMPLE_EDGE]
+                            <-Reducer 29 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_222]
                                 Select Operator [SEL_180] (rows=1 width=8)
                                   Filter Operator [FIL_179] (rows=1 width=8)
                                     predicate:(sq_count_check(_col0) <= 1)
                                     Group By Operator [GBY_177] (rows=1 width=8)
                                       Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                    <-Reducer 47 [CUSTOM_SIMPLE_EDGE]
+                                    <-Reducer 28 [CUSTOM_SIMPLE_EDGE]
                                       PARTITION_ONLY_SHUFFLE [RS_176]
                                         Group By Operator [GBY_175] (rows=1 width=8)
                                           Output:["_col0"],aggregations:["count()"]
                                           Select Operator [SEL_172] (rows=348477374 width=88)
                                             Group By Operator [GBY_171] (rows=348477374 width=88)
                                               Output:["_col0"],keys:KEY._col0
-                                            <-Reducer 46 [SIMPLE_EDGE]
+                                            <-Reducer 27 [SIMPLE_EDGE]
                                               SHUFFLE [RS_170]
                                                 PartitionCols:_col0
                                                 Group By Operator [GBY_169] (rows=696954748 width=88)
@@ -272,43 +190,43 @@ Stage-0
                                                     Output:["_col0"]
                                                     Merge Join Operator [MERGEJOIN_361] (rows=696954748 width=88)
                                                       Conds:RS_164._col1=RS_165._col0(Inner),Output:["_col6"]
-                                                    <-Map 51 [SIMPLE_EDGE]
+                                                    <-Map 40 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_165]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_160] (rows=80000000 width=860)
                                                           Output:["_col0"]
                                                           Filter Operator [FIL_341] (rows=80000000 width=860)
                                                             predicate:c_customer_sk is not null
-                                                            TableScan [TS_158] (rows=80000000 width=860)
+                                                            TableScan [TS_39] (rows=80000000 width=860)
                                                               default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
-                                                    <-Reducer 45 [SIMPLE_EDGE]
+                                                    <-Reducer 26 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_164]
                                                         PartitionCols:_col1
                                                         Merge Join Operator [MERGEJOIN_360] (rows=633595212 width=88)
                                                           Conds:RS_161._col0=RS_162._col0(Inner),Output:["_col1"]
-                                                        <-Map 44 [SIMPLE_EDGE]
-                                                          SHUFFLE [RS_161]
-                                                            PartitionCols:_col0
-                                                            Select Operator [SEL_154] (rows=575995635 width=88)
-                                                              Output:["_col0","_col1"]
-                                                              Filter Operator [FIL_339] (rows=575995635 width=88)
-                                                                predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
-                                                                TableScan [TS_152] (rows=575995635 width=88)
-                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
-                                                        <-Map 50 [SIMPLE_EDGE]
+                                                        <-Map 31 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_162]
                                                             PartitionCols:_col0
                                                             Select Operator [SEL_157] (rows=36525 width=1119)
                                                               Output:["_col0"]
                                                               Filter Operator [FIL_340] (rows=36525 width=1119)
                                                                 predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                                TableScan [TS_155] (rows=73049 width=1119)
+                                                                TableScan [TS_36] (rows=73049 width=1119)
                                                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 56 [CUSTOM_SIMPLE_EDGE]
+                                                        <-Map 20 [SIMPLE_EDGE]
+                                                          SHUFFLE [RS_161]
+                                                            PartitionCols:_col0
+                                                            Select Operator [SEL_154] (rows=575995635 width=88)
+                                                              Output:["_col0","_col1"]
+                                                              Filter Operator [FIL_339] (rows=575995635 width=88)
+                                                                predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
+                                                                TableScan [TS_33] (rows=575995635 width=88)
+                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
+                            <-Reducer 39 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_223]
                                 Group By Operator [GBY_205] (rows=1 width=224)
                                   Output:["_col0"],aggregations:["max(VALUE._col0)"]
-                                <-Reducer 55 [CUSTOM_SIMPLE_EDGE]
+                                <-Reducer 38 [CUSTOM_SIMPLE_EDGE]
                                   PARTITION_ONLY_SHUFFLE [RS_204]
                                     Group By Operator [GBY_203] (rows=1 width=224)
                                       Output:["_col0"],aggregations:["max(_col1)"]
@@ -316,7 +234,7 @@ Stage-0
                                         Output:["_col1"]
                                         Group By Operator [GBY_200] (rows=348477374 width=88)
                                           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                                        <-Reducer 54 [SIMPLE_EDGE]
+                                        <-Reducer 37 [SIMPLE_EDGE]
                                           SHUFFLE [RS_199]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_198] (rows=696954748 width=88)
@@ -325,43 +243,41 @@ Stage-0
                                                 Output:["_col0","_col1"]
                                                 Merge Join Operator [MERGEJOIN_363] (rows=696954748 width=88)
                                                   Conds:RS_193._col1=RS_194._col0(Inner),Output:["_col2","_col3","_col6"]
-                                                <-Map 58 [SIMPLE_EDGE]
+                                                <-Map 40 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_194]
                                                     PartitionCols:_col0
                                                     Select Operator [SEL_189] (rows=80000000 width=860)
                                                       Output:["_col0"]
                                                       Filter Operator [FIL_344] (rows=80000000 width=860)
                                                         predicate:c_customer_sk is not null
-                                                        TableScan [TS_187] (rows=80000000 width=860)
-                                                          default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
-                                                <-Reducer 53 [SIMPLE_EDGE]
+                                                         Please refer to the previous TableScan [TS_39]
+                                                <-Reducer 36 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_193]
                                                     PartitionCols:_col1
                                                     Merge Join Operator [MERGEJOIN_362] (rows=633595212 width=88)
                                                       Conds:RS_190._col0=RS_191._col0(Inner),Output:["_col1","_col2","_col3"]
-                                                    <-Map 52 [SIMPLE_EDGE]
+                                                    <-Map 31 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_191]
+                                                        PartitionCols:_col0
+                                                        Select Operator [SEL_186] (rows=36525 width=1119)
+                                                          Output:["_col0"]
+                                                          Filter Operator [FIL_343] (rows=36525 width=1119)
+                                                            predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
+                                                             Please refer to the previous TableScan [TS_36]
+                                                    <-Map 45 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_190]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_183] (rows=575995635 width=88)
                                                           Output:["_col0","_col1","_col2","_col3"]
                                                           Filter Operator [FIL_342] (rows=575995635 width=88)
                                                             predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
-                                                            TableScan [TS_181] (rows=575995635 width=88)
+                                                            TableScan [TS_62] (rows=575995635 width=88)
                                                               default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk","ss_quantity","ss_sales_price"]
-                                                    <-Map 57 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_191]
-                                                        PartitionCols:_col0
-                                                        Select Operator [SEL_186] (rows=36525 width=1119)
-                                                          Output:["_col0"]
-                                                          Filter Operator [FIL_343] (rows=36525 width=1119)
-                                                            predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                            TableScan [TS_184] (rows=73049 width=1119)
-                                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 61 [CUSTOM_SIMPLE_EDGE]
+                            <-Reducer 44 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_224]
                                 Group By Operator [GBY_220] (rows=316797606 width=88)
                                   Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                                <-Reducer 60 [SIMPLE_EDGE]
+                                <-Reducer 43 [SIMPLE_EDGE]
                                   SHUFFLE [RS_219]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_218] (rows=633595212 width=88)
@@ -370,24 +286,105 @@ Stage-0
                                         Output:["_col0","_col1"]
                                         Merge Join Operator [MERGEJOIN_364] (rows=633595212 width=88)
                                           Conds:RS_213._col0=RS_214._col0(Inner),Output:["_col1","_col2","_col3"]
-                                        <-Map 59 [SIMPLE_EDGE]
+                                        <-Map 40 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_214]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_212] (rows=80000000 width=860)
+                                              Output:["_col0"]
+                                              Filter Operator [FIL_346] (rows=80000000 width=860)
+                                                predicate:c_customer_sk is not null
+                                                 Please refer to the previous TableScan [TS_39]
+                                        <-Map 46 [SIMPLE_EDGE]
                                           SHUFFLE [RS_213]
                                             PartitionCols:_col0
                                             Select Operator [SEL_209] (rows=575995635 width=88)
                                               Output:["_col0","_col1","_col2"]
                                               Filter Operator [FIL_345] (rows=575995635 width=88)
                                                 predicate:ss_customer_sk is not null
-                                                TableScan [TS_207] (rows=575995635 width=88)
+                                                TableScan [TS_88] (rows=575995635 width=88)
                                                   default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_customer_sk","ss_quantity","ss_sales_price"]
-                                        <-Map 62 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_214]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_212] (rows=80000000 width=860)
-                                              Output:["_col0"]
-                                              Filter Operator [FIL_346] (rows=80000000 width=860)
-                                                predicate:c_customer_sk is not null
-                                                TableScan [TS_210] (rows=80000000 width=860)
-                                                  default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
+                    <-Reducer 9 [SIMPLE_EDGE]
+                      SHUFFLE [RS_234]
+                        PartitionCols:_col2
+                        Merge Join Operator [MERGEJOIN_366] (rows=174243235 width=135)
+                          Conds:RS_231._col1=RS_232._col0(Inner),Output:["_col2","_col3","_col4"]
+                        <-Reducer 17 [SIMPLE_EDGE]
+                          SHUFFLE [RS_232]
+                            PartitionCols:_col0
+                            Group By Operator [GBY_150] (rows=58079562 width=88)
+                              Output:["_col0"],keys:_col1
+                              Select Operator [SEL_146] (rows=116159124 width=88)
+                                Output:["_col1"]
+                                Filter Operator [FIL_145] (rows=116159124 width=88)
+                                  predicate:(_col3 > 4)
+                                  Select Operator [SEL_348] (rows=348477374 width=88)
+                                    Output:["_col0","_col3"]
+                                    Group By Operator [GBY_144] (rows=348477374 width=88)
+                                      Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
+                                    <-Reducer 16 [SIMPLE_EDGE]
+                                      SHUFFLE [RS_143]
+                                        PartitionCols:_col0
+                                        Group By Operator [GBY_142] (rows=696954748 width=88)
+                                          Output:["_col0","_col1","_col2","_col3"],aggregations:["count()"],keys:_col1, _col0, _col2
+                                          Select Operator [SEL_140] (rows=696954748 width=88)
+                                            Output:["_col0","_col1","_col2"]
+                                            Merge Join Operator [MERGEJOIN_359] (rows=696954748 width=88)
+                                              Conds:RS_137._col1=RS_138._col0(Inner),Output:["_col3","_col5","_col6"]
+                                            <-Map 19 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_138]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_133] (rows=462000 width=1436)
+                                                  Output:["_col0","_col1"]
+                                                  Filter Operator [FIL_338] (rows=462000 width=1436)
+                                                    predicate:i_item_sk is not null
+                                                    TableScan [TS_12] (rows=462000 width=1436)
+                                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_desc"]
+                                            <-Reducer 15 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_137]
+                                                PartitionCols:_col1
+                                                Merge Join Operator [MERGEJOIN_358] (rows=633595212 width=88)
+                                                  Conds:RS_134._col0=RS_135._col0(Inner),Output:["_col1","_col3"]
+                                                <-Map 11 [SIMPLE_EDGE]
+                                                  SHUFFLE [RS_134]
+                                                    PartitionCols:_col0
+                                                    Select Operator [SEL_127] (rows=575995635 width=88)
+                                                      Output:["_col0","_col1"]
+                                                      Filter Operator [FIL_336] (rows=575995635 width=88)
+                                                        predicate:(ss_sold_date_sk is not null and ss_item_sk is not null)
+                                                        TableScan [TS_6] (rows=575995635 width=88)
+                                                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk"]
+                                                <-Map 18 [SIMPLE_EDGE]
+                                                  SHUFFLE [RS_135]
+                                                    PartitionCols:_col0
+                                                    Select Operator [SEL_130] (rows=36525 width=1119)
+                                                      Output:["_col0","_col1"]
+                                                      Filter Operator [FIL_337] (rows=36525 width=1119)
+                                                        predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
+                                                        TableScan [TS_9] (rows=73049 width=1119)
+                                                          default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_year"]
+                        <-Reducer 8 [SIMPLE_EDGE]
+                          SHUFFLE [RS_231]
+                            PartitionCols:_col1
+                            Merge Join Operator [MERGEJOIN_357] (rows=158402938 width=135)
+                              Conds:RS_228._col0=RS_229._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
+                            <-Map 7 [SIMPLE_EDGE]
+                              SHUFFLE [RS_229]
+                                PartitionCols:_col0
+                                Select Operator [SEL_124] (rows=18262 width=1119)
+                                  Output:["_col0"]
+                                  Filter Operator [FIL_335] (rows=18262 width=1119)
+                                    predicate:((d_year = 1999) and (d_moy = 1) and d_date_sk is not null)
+                                    TableScan [TS_3] (rows=73049 width=1119)
+                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                            <-Map 47 [SIMPLE_EDGE]
+                              SHUFFLE [RS_228]
+                                PartitionCols:_col0
+                                Select Operator [SEL_121] (rows=144002668 width=135)
+                                  Output:["_col0","_col1","_col2","_col3","_col4"]
+                                  Filter Operator [FIL_334] (rows=144002668 width=135)
+                                    predicate:(ws_item_sk is not null and ws_bill_customer_sk is not null and ws_sold_date_sk is not null)
+                                    TableScan [TS_119] (rows=144002668 width=135)
+                                      default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_customer_sk","ws_quantity","ws_list_price"]
             <-Reducer 4 [CONTAINS]
               Reduce Output Operator [RS_242]
                 Group By Operator [GBY_241] (rows=1 width=112)
@@ -396,7 +393,7 @@ Stage-0
                     Output:["_col0"]
                     Merge Join Operator [MERGEJOIN_368] (rows=383314495 width=135)
                       Conds:RS_115._col1=RS_116._col0(Inner),Output:["_col3","_col4"]
-                    <-Reducer 19 [SIMPLE_EDGE]
+                    <-Reducer 25 [SIMPLE_EDGE]
                       SHUFFLE [RS_116]
                         PartitionCols:_col0
                         Select Operator [SEL_108] (rows=105599202 width=321)
@@ -405,21 +402,21 @@ Stage-0
                             predicate:(_col3 > (0.95 * _col1))
                             Merge Join Operator [MERGEJOIN_367] (rows=316797606 width=321)
                               Conds:(Inner),(Inner),Output:["_col1","_col2","_col3"]
-                            <-Reducer 18 [CUSTOM_SIMPLE_EDGE]
+                            <-Reducer 24 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_103]
                                 Select Operator [SEL_61] (rows=1 width=8)
                                   Filter Operator [FIL_60] (rows=1 width=8)
                                     predicate:(sq_count_check(_col0) <= 1)
                                     Group By Operator [GBY_58] (rows=1 width=8)
                                       Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                    <-Reducer 17 [CUSTOM_SIMPLE_EDGE]
+                                    <-Reducer 23 [CUSTOM_SIMPLE_EDGE]
                                       PARTITION_ONLY_SHUFFLE [RS_57]
                                         Group By Operator [GBY_56] (rows=1 width=8)
                                           Output:["_col0"],aggregations:["count()"]
                                           Select Operator [SEL_53] (rows=348477374 width=88)
                                             Group By Operator [GBY_52] (rows=348477374 width=88)
                                               Output:["_col0"],keys:KEY._col0
-                                            <-Reducer 16 [SIMPLE_EDGE]
+                                            <-Reducer 22 [SIMPLE_EDGE]
                                               SHUFFLE [RS_51]
                                                 PartitionCols:_col0
                                                 Group By Operator [GBY_50] (rows=696954748 width=88)
@@ -428,43 +425,40 @@ Stage-0
                                                     Output:["_col0"]
                                                     Merge Join Operator [MERGEJOIN_353] (rows=696954748 width=88)
                                                       Conds:RS_45._col1=RS_46._col0(Inner),Output:["_col6"]
-                                                    <-Map 21 [SIMPLE_EDGE]
+                                                    <-Map 40 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_46]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_41] (rows=80000000 width=860)
                                                           Output:["_col0"]
                                                           Filter Operator [FIL_328] (rows=80000000 width=860)
                                                             predicate:c_customer_sk is not null
-                                                            TableScan [TS_39] (rows=80000000 width=860)
-                                                              default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
-                                                    <-Reducer 15 [SIMPLE_EDGE]
+                                                             Please refer to the previous TableScan [TS_39]
+                                                    <-Reducer 21 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_45]
                                                         PartitionCols:_col1
                                                         Merge Join Operator [MERGEJOIN_352] (rows=633595212 width=88)
                                                           Conds:RS_42._col0=RS_43._col0(Inner),Output:["_col1"]
-                                                        <-Map 14 [SIMPLE_EDGE]
-                                                          SHUFFLE [RS_42]
-                                                            PartitionCols:_col0
-                                                            Select Operator [SEL_35] (rows=575995635 width=88)
-                                                              Output:["_col0","_col1"]
-                                                              Filter Operator [FIL_326] (rows=575995635 width=88)
-                                                                predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
-                                                                TableScan [TS_33] (rows=575995635 width=88)
-                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
-                                                        <-Map 20 [SIMPLE_EDGE]
+                                                        <-Map 31 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_43]
                                                             PartitionCols:_col0
                                                             Select Operator [SEL_38] (rows=36525 width=1119)
                                                               Output:["_col0"]
                                                               Filter Operator [FIL_327] (rows=36525 width=1119)
                                                                 predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                                TableScan [TS_36] (rows=73049 width=1119)
-                                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 26 [CUSTOM_SIMPLE_EDGE]
+                                                                 Please refer to the previous TableScan [TS_36]
+                                                        <-Map 20 [SIMPLE_EDGE]
+                                                          SHUFFLE [RS_42]
+                                                            PartitionCols:_col0
+                                                            Select Operator [SEL_35] (rows=575995635 width=88)
+                                                              Output:["_col0","_col1"]
+                                                              Filter Operator [FIL_326] (rows=575995635 width=88)
+                                                                predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
+                                                                 Please refer to the previous TableScan [TS_33]
+                            <-Reducer 35 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_104]
                                 Group By Operator [GBY_86] (rows=1 width=224)
                                   Output:["_col0"],aggregations:["max(VALUE._col0)"]
-                                <-Reducer 25 [CUSTOM_SIMPLE_EDGE]
+                                <-Reducer 34 [CUSTOM_SIMPLE_EDGE]
                                   PARTITION_ONLY_SHUFFLE [RS_85]
                                     Group By Operator [GBY_84] (rows=1 width=224)
                                       Output:["_col0"],aggregations:["max(_col1)"]
@@ -472,7 +466,7 @@ Stage-0
                                         Output:["_col1"]
                                         Group By Operator [GBY_81] (rows=348477374 width=88)
                                           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                                        <-Reducer 24 [SIMPLE_EDGE]
+                                        <-Reducer 33 [SIMPLE_EDGE]
                                           SHUFFLE [RS_80]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_79] (rows=696954748 width=88)
@@ -481,43 +475,40 @@ Stage-0
                                                 Output:["_col0","_col1"]
                                                 Merge Join Operator [MERGEJOIN_355] (rows=696954748 width=88)
                                                   Conds:RS_74._col1=RS_75._col0(Inner),Output:["_col2","_col3","_col6"]
-                                                <-Map 28 [SIMPLE_EDGE]
+                                                <-Map 40 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_75]
                                                     PartitionCols:_col0
                                                     Select Operator [SEL_70] (rows=80000000 width=860)
                                                       Output:["_col0"]
                                                       Filter Operator [FIL_331] (rows=80000000 width=860)
                                                         predicate:c_customer_sk is not null
-                                                        TableScan [TS_68] (rows=80000000 width=860)
-                                                          default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
-                                                <-Reducer 23 [SIMPLE_EDGE]
+                                                         Please refer to the previous TableScan [TS_39]
+                                                <-Reducer 32 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_74]
                                                     PartitionCols:_col1
                                                     Merge Join Operator [MERGEJOIN_354] (rows=633595212 width=88)
                                                       Conds:RS_71._col0=RS_72._col0(Inner),Output:["_col1","_col2","_col3"]
-                                                    <-Map 22 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_71]
-                                                        PartitionCols:_col0
-                                                        Select Operator [SEL_64] (rows=575995635 width=88)
-                                                          Output:["_col0","_col1","_col2","_col3"]
-                                                          Filter Operator [FIL_329] (rows=575995635 width=88)
-                                                            predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
-                                                            TableScan [TS_62] (rows=575995635 width=88)
-                                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk","ss_quantity","ss_sales_price"]
-                                                    <-Map 27 [SIMPLE_EDGE]
+                                                    <-Map 31 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_72]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_67] (rows=36525 width=1119)
                                                           Output:["_col0"]
                                                           Filter Operator [FIL_330] (rows=36525 width=1119)
                                                             predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                            TableScan [TS_65] (rows=73049 width=1119)
-                                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 31 [CUSTOM_SIMPLE_EDGE]
+                                                             Please refer to the previous TableScan [TS_36]
+                                                    <-Map 45 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_71]
+                                                        PartitionCols:_col0
+                                                        Select Operator [SEL_64] (rows=575995635 width=88)
+                                                          Output:["_col0","_col1","_col2","_col3"]
+                                                          Filter Operator [FIL_329] (rows=575995635 width=88)
+                                                            predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
+                                                             Please refer to the previous TableScan [TS_62]
+                            <-Reducer 42 [CUSTOM_SIMPLE_EDGE]
                               PARTITION_ONLY_SHUFFLE [RS_105]
                                 Group By Operator [GBY_101] (rows=316797606 width=88)
                                   Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                                <-Reducer 30 [SIMPLE_EDGE]
+                                <-Reducer 41 [SIMPLE_EDGE]
                                   SHUFFLE [RS_100]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_99] (rows=633595212 width=88)
@@ -526,30 +517,28 @@ Stage-0
                                         Output:["_col0","_col1"]
                                         Merge Join Operator [MERGEJOIN_356] (rows=633595212 width=88)
                                           Conds:RS_94._col0=RS_95._col0(Inner),Output:["_col1","_col2","_col3"]
-                                        <-Map 29 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_94]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_90] (rows=575995635 width=88)
-                                              Output:["_col0","_col1","_col2"]
-                                              Filter Operator [FIL_332] (rows=575995635 width=88)
-                                                predicate:ss_customer_sk is not null
-                                                TableScan [TS_88] (rows=575995635 width=88)
-                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_customer_sk","ss_quantity","ss_sales_price"]
-                                        <-Map 32 [SIMPLE_EDGE]
+                                        <-Map 40 [SIMPLE_EDGE]
                                           SHUFFLE [RS_95]
                                             PartitionCols:_col0
                                             Select Operator [SEL_93] (rows=80000000 width=860)
                                               Output:["_col0"]
                                               Filter Operator [FIL_333] (rows=80000000 width=860)
                                                 predicate:c_customer_sk is not null
-                                                TableScan [TS_91] (rows=80000000 width=860)
-                                                  default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk"]
+                                                 Please refer to the previous TableScan [TS_39]
+                                        <-Map 46 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_94]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_90] (rows=575995635 width=88)
+                                              Output:["_col0","_col1","_col2"]
+                                              Filter Operator [FIL_332] (rows=575995635 width=88)
+                                                predicate:ss_customer_sk is not null
+                                                 Please refer to the previous TableScan [TS_88]
                     <-Reducer 3 [SIMPLE_EDGE]
                       SHUFFLE [RS_115]
                         PartitionCols:_col1
                         Merge Join Operator [MERGEJOIN_365] (rows=348467716 width=135)
                           Conds:RS_112._col2=RS_113._col0(Inner),Output:["_col1","_col3","_col4"]
-                        <-Reducer 11 [SIMPLE_EDGE]
+                        <-Reducer 14 [SIMPLE_EDGE]
                           SHUFFLE [RS_113]
                             PartitionCols:_col0
                             Group By Operator [GBY_31] (rows=58079562 width=88)
@@ -562,7 +551,7 @@ Stage-0
                                     Output:["_col0","_col3"]
                                     Group By Operator [GBY_25] (rows=348477374 width=88)
                                       Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                    <-Reducer 10 [SIMPLE_EDGE]
+                                    <-Reducer 13 [SIMPLE_EDGE]
                                       SHUFFLE [RS_24]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_23] (rows=696954748 width=88)
@@ -571,43 +560,48 @@ Stage-0
                                             Output:["_col0","_col1","_col2"]
                                             Merge Join Operator [MERGEJOIN_351] (rows=696954748 width=88)
                                               Conds:RS_18._col1=RS_19._col0(Inner),Output:["_col3","_col5","_col6"]
-                                            <-Map 13 [SIMPLE_EDGE]
+                                            <-Map 19 [SIMPLE_EDGE]
                                               SHUFFLE [RS_19]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_14] (rows=462000 width=1436)
                                                   Output:["_col0","_col1"]
                                                   Filter Operator [FIL_325] (rows=462000 width=1436)
                                                     predicate:i_item_sk is not null
-                                                    TableScan [TS_12] (rows=462000 width=1436)
-                                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_desc"]
-                                            <-Reducer 9 [SIMPLE_EDGE]
+                                                     Please refer to the previous TableScan [TS_12]
+                                            <-Reducer 12 [SIMPLE_EDGE]
                                               SHUFFLE [RS_18]
                                                 PartitionCols:_col1
                                                 Merge Join Operator [MERGEJOIN_350] (rows=633595212 width=88)
                                                   Conds:RS_15._col0=RS_16._col0(Inner),Output:["_col1","_col3"]
-                                                <-Map 12 [SIMPLE_EDGE]
-                                                  SHUFFLE [RS_16]
-                                                    PartitionCols:_col0
-                                                    Select Operator [SEL_11] (rows=36525 width=1119)
-                                                      Output:["_col0","_col1"]
-                                                      Filter Operator [FIL_324] (rows=36525 width=1119)
-                                                        predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
-                                                        TableScan [TS_9] (rows=73049 width=1119)
-                                                          default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_year"]
-                                                <-Map 8 [SIMPLE_EDGE]
+                                                <-Map 11 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_15]
                                                     PartitionCols:_col0
                                                     Select Operator [SEL_8] (rows=575995635 width=88)
                                                       Output:["_col0","_col1"]
                                                       Filter Operator [FIL_323] (rows=575995635 width=88)
                                                         predicate:(ss_sold_date_sk is not null and ss_item_sk is not null)
-                                                        TableScan [TS_6] (rows=575995635 width=88)
-                                                          default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk"]
+                                                         Please refer to the previous TableScan [TS_6]
+                                                <-Map 18 [SIMPLE_EDGE]
+                                                  SHUFFLE [RS_16]
+                                                    PartitionCols:_col0
+                                                    Select Operator [SEL_11] (rows=36525 width=1119)
+                                                      Output:["_col0","_col1"]
+                                                      Filter Operator [FIL_324] (rows=36525 width=1119)
+                                                        predicate:((d_year) IN (1999, 2000, 2001, 2002) and d_date_sk is not null)
+                                                         Please refer to the previous TableScan [TS_9]
                         <-Reducer 2 [SIMPLE_EDGE]
                           SHUFFLE [RS_112]
                             PartitionCols:_col2
                             Merge Join Operator [MERGEJOIN_349] (rows=316788826 width=135)
                               Conds:RS_109._col0=RS_110._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
+                            <-Map 7 [SIMPLE_EDGE]
+                              SHUFFLE [RS_110]
+                                PartitionCols:_col0
+                                Select Operator [SEL_5] (rows=18262 width=1119)
+                                  Output:["_col0"]
+                                  Filter Operator [FIL_322] (rows=18262 width=1119)
+                                    predicate:((d_year = 1999) and (d_moy = 1) and d_date_sk is not null)
+                                     Please refer to the previous TableScan [TS_3]
                             <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_109]
                                 PartitionCols:_col0
@@ -617,13 +611,4 @@ Stage-0
                                     predicate:(cs_item_sk is not null and cs_bill_customer_sk is not null and cs_sold_date_sk is not null)
                                     TableScan [TS_0] (rows=287989836 width=135)
                                       default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_customer_sk","cs_item_sk","cs_quantity","cs_list_price"]
-                            <-Map 7 [SIMPLE_EDGE]
-                              SHUFFLE [RS_110]
-                                PartitionCols:_col0
-                                Select Operator [SEL_5] (rows=18262 width=1119)
-                                  Output:["_col0"]
-                                  Filter Operator [FIL_322] (rows=18262 width=1119)
-                                    predicate:((d_year = 1999) and (d_moy = 1) and d_date_sk is not null)
-                                    TableScan [TS_3] (rows=73049 width=1119)
-                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
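
The "Please refer to the previous TableScan [TS_nn]" lines that replace the duplicated TableScan / default@... pairs in the updated plans are the visible effect of HIVE-16602 ("Implement shared scans with Tez"): plan branches that used to each carry their own scan of the same table (date_dim, store_sales, item, customer) now point back at a single TableScan operator, and with the duplicate scan vertices gone the remaining Map/Reducer vertices are renumbered, which is why so many edge numbers change in these diffs. A very rough, self-contained sketch of the idea is below; the SharedScanSketch, ScanRequest, SharedScan and shareScans names are toy stand-ins for illustration and are not part of Hive.

import java.util.*;

public class SharedScanSketch {

  /** One plan branch's request to scan a table with some projected columns (toy class). */
  static final class ScanRequest {
    final String table;
    final Set<String> columns;
    ScanRequest(String table, String... columns) {
      this.table = table;
      this.columns = new TreeSet<String>(Arrays.asList(columns));
    }
  }

  /** The single operator that equivalent branches end up pointing at (toy class). */
  static final class SharedScan {
    final String table;
    final Set<String> columns;
    SharedScan(String table, Set<String> columns) {
      this.table = table;
      this.columns = columns;
    }
    @Override public String toString() {
      return "TableScan[" + table + ", Output:" + columns + "]";
    }
  }

  /** Branches issuing an identical scan request reuse one SharedScan instead of each getting their own. */
  static Map<ScanRequest, SharedScan> shareScans(List<ScanRequest> requests) {
    Map<String, SharedScan> seen = new LinkedHashMap<String, SharedScan>();
    Map<ScanRequest, SharedScan> assignment = new LinkedHashMap<ScanRequest, SharedScan>();
    for (ScanRequest r : requests) {
      String key = r.table + "|" + r.columns;        // equivalence here: same table, same columns
      SharedScan scan = seen.get(key);
      if (scan == null) {
        scan = new SharedScan(r.table, r.columns);   // first branch materialises the scan
        seen.put(key, scan);
      }
      assignment.put(r, scan);                       // later branches just refer back to it
    }
    return assignment;
  }

  public static void main(String[] args) {
    List<ScanRequest> requests = Arrays.asList(
        new ScanRequest("date_dim", "d_date_sk", "d_date"),
        new ScanRequest("date_dim", "d_date_sk", "d_date"),   // a duplicate branch in the plan
        new ScanRequest("item", "i_item_sk", "i_item_id"));
    Map<ScanRequest, SharedScan> assignment = shareScans(requests);
    // The two date_dim branches map to the very same SharedScan object.
    System.out.println(assignment.get(requests.get(0)) == assignment.get(requests.get(1)));  // true
    for (SharedScan scan : new LinkedHashSet<SharedScan>(assignment.values())) {
      System.out.println(scan);
    }
  }
}

In the toy model the second date_dim branch gets the same SharedScan instance as the first, which is what the plan's "Please refer to the previous TableScan" back-references express.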
 


[05/50] [abbrv] hive git commit: HIVE-16652: LlapInputFormat: Seeing "output error" WARN message (Jason Dere, reviewed by Siddharth Seth)

Posted by we...@apache.org.
HIVE-16652: LlapInputFormat: Seeing "output error" WARN message (Jason Dere, reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4291c467
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4291c467
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4291c467

Branch: refs/heads/hive-14535
Commit: 4291c467aac81bcef140f1b8b8cdaba6edaf2f96
Parents: fea6df6
Author: Jason Dere <jd...@hortonworks.com>
Authored: Fri May 12 16:52:31 2017 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Fri May 12 16:52:31 2017 -0700

----------------------------------------------------------------------
 .../ext/LlapTaskUmbilicalExternalClient.java    | 89 +++++++++++++++++++-
 .../helpers/LlapTaskUmbilicalServer.java        |  4 +
 2 files changed, 90 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4291c467/llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java b/llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
index 7d0d6d2..c7de417 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
@@ -30,6 +30,7 @@ import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
@@ -47,6 +48,8 @@ import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableV
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary;
 import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol;
 import org.apache.hadoop.hive.llap.security.LlapTokenIdentifier;
@@ -85,15 +88,18 @@ public class LlapTaskUmbilicalExternalClient extends AbstractService implements
   private LlapTaskUmbilicalExternalResponder responder = null;
   private final ScheduledThreadPoolExecutor timer;
   private final long connectionTimeout;
+  private volatile boolean closed = false;
 
   private static class TaskHeartbeatInfo {
+    final QueryIdentifierProto queryIdentifierProto;
     final String taskAttemptId;
     final String hostname;
     String uniqueNodeId;
     final int port;
     final AtomicLong lastHeartbeat = new AtomicLong();
 
-    public TaskHeartbeatInfo(String taskAttemptId, String hostname, int port) {
+    public TaskHeartbeatInfo(QueryIdentifierProto queryIdentifierProto, String taskAttemptId, String hostname, int port) {
+      this.queryIdentifierProto = queryIdentifierProto;
       this.taskAttemptId = taskAttemptId;
       this.hostname = hostname;
       this.port = port;
@@ -137,7 +143,45 @@ public class LlapTaskUmbilicalExternalClient extends AbstractService implements
   }
 
   @Override
-  public void serviceStop() {
+  public void serviceStop() throws Exception {
+    if (closed) {
+      throw new IllegalStateException("Client has already been closed");
+    }
+    closed = true;
+
+    // Check if the request is registered - if so we can cancel the request
+    for (Map.Entry<String, TaskHeartbeatInfo> taskEntry : registeredTasks.entrySet()) {
+      terminateRequest(taskEntry.getValue());
+    }
+    registeredTasks.clear();
+
+    scheduleClientForCleanup(this);
+  }
+
+  private void terminateRequest(TaskHeartbeatInfo thi) {
+    TerminateFragmentRequestProto.Builder builder = TerminateFragmentRequestProto.newBuilder();
+    builder.setQueryIdentifier(thi.queryIdentifierProto);
+    builder.setFragmentIdentifierString(thi.taskAttemptId);
+
+    final String taskAttemptId = thi.taskAttemptId;
+    communicator.sendTerminateFragment(builder.build(), thi.hostname, thi.port,
+        new LlapProtocolClientProxy.ExecuteRequestCallback<TerminateFragmentResponseProto>() {
+
+      @Override
+      public void setResponse(TerminateFragmentResponseProto response) {
+        LOG.debug("Received terminate response for " + taskAttemptId);
+      }
+
+      @Override
+      public void indicateError(Throwable t) {
+        String msg = "Failed to terminate " + taskAttemptId;
+        LOG.error(msg, t);
+        // Don't propagate the error - termination was done as part of closing the client.
+      }
+    });
+  }
+
+  private void doShutdown() throws IOException {
     llapTaskUmbilicalServer.shutdownServer();
     timer.shutdown();
     if (this.communicator != null) {
@@ -170,7 +214,7 @@ public class LlapTaskUmbilicalExternalClient extends AbstractService implements
         vertex.getVertexIndex(), request.getFragmentNumber(), request.getAttemptNumber());
     final String fragmentId = attemptId.toString();
 
-    final TaskHeartbeatInfo thi = new TaskHeartbeatInfo(fragmentId, llapHost, llapPort);
+    final TaskHeartbeatInfo thi = new TaskHeartbeatInfo(queryIdentifierProto, fragmentId, llapHost, llapPort);
     pendingEvents.putIfAbsent(
         fragmentId, new PendingEventData(thi, Lists.<TezEvent>newArrayList()));
 
@@ -357,6 +401,13 @@ public class LlapTaskUmbilicalExternalClient extends AbstractService implements
       TezTaskAttemptID taskAttemptId = request.getCurrentTaskAttemptID();
       String taskAttemptIdString = taskAttemptId.toString();
 
+      if (closed) {
+        LOG.info("Client has already been closed, but received heartbeat from " + taskAttemptIdString);
+        // Set shouldDie response so the LLAP daemon closes this umbilical connection.
+        response.setShouldDie();
+        return response;
+      }
+
       updateHeartbeatInfo(taskAttemptIdString);
 
       List<TezEvent> tezEvents = null;
@@ -456,4 +507,36 @@ public class LlapTaskUmbilicalExternalClient extends AbstractService implements
     }
   }
 
+  private static void scheduleClientForCleanup(LlapTaskUmbilicalExternalClient client) {
+    // Add a bit of delay in case the daemon has not closed the umbilical connection yet.
+    clientCleanupExecuter.schedule(new ClientCleanupTask(client), cleanupDelay, TimeUnit.MILLISECONDS);
+  }
+
+  static final ScheduledThreadPoolExecutor clientCleanupExecuter = new ScheduledThreadPoolExecutor(1);
+  static final int cleanupDelay = 2000;
+
+  static class ClientCleanupTask implements Runnable {
+    final LlapTaskUmbilicalExternalClient client;
+
+    public ClientCleanupTask(LlapTaskUmbilicalExternalClient client) {
+      this.client = client;
+    }
+
+    @Override
+    public void run() {
+      if (client.llapTaskUmbilicalServer.getNumOpenConnections() == 0) {
+        // No more outstanding connections, ok to close.
+        try {
+          LOG.debug("Closing client");
+          client.doShutdown();
+        } catch (Exception err) {
+          LOG.error("Error cleaning up client", err);
+        }
+      } else {
+        // Reschedule this task for later.
+        LOG.debug("Client still has umbilical connection - rescheduling cleanup.");
+        scheduleClientForCleanup(client);
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/4291c467/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
index 470ee6d..403381d 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
@@ -72,6 +72,10 @@ public class LlapTaskUmbilicalServer {
     return this.address;
   }
 
+  public int getNumOpenConnections() {
+    return server.getNumOpenConnections();
+  }
+
   public void shutdownServer() {
     if (started.get()) { // Primarily to avoid multiple shutdowns.
       started.set(false);
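
The patch above handles client shutdown in two steps: serviceStop() marks the client closed and sends terminate requests for any registered fragments, while the actual doShutdown() is deferred to a scheduled cleanup task that only runs once getNumOpenConnections() reports zero, rescheduling itself otherwise; heartbeats that arrive after close simply get a shouldDie response. A minimal, self-contained sketch of that reschedule-until-idle pattern is below; the DeferredCleanupSketch class and the ConnectionCounter interface are hypothetical stand-ins for the real LlapTaskUmbilicalExternalClient and LlapTaskUmbilicalServer, with only getNumOpenConnections() and shutdownServer() mirroring methods shown in the diff.

import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class DeferredCleanupSketch {

  /** Hypothetical stand-in for the umbilical server's connection count and shutdown. */
  interface ConnectionCounter {
    int getNumOpenConnections();
    void shutdownServer();
  }

  private static final ScheduledThreadPoolExecutor CLEANUP_EXECUTOR = new ScheduledThreadPoolExecutor(1);
  private static final long CLEANUP_DELAY_MS = 2000;

  private final ConnectionCounter server;
  private volatile boolean closed = false;

  DeferredCleanupSketch(ConnectionCounter server) {
    this.server = server;
  }

  /** Mirrors serviceStop(): refuse a double close, then hand off to the deferred cleanup. */
  void close() {
    if (closed) {
      throw new IllegalStateException("Client has already been closed");
    }
    closed = true;
    // In the real patch this is also where outstanding fragments get a terminate request.
    scheduleCleanup();
  }

  private void scheduleCleanup() {
    // Small delay so the remote side has a chance to drop its connection first.
    CLEANUP_EXECUTOR.schedule(new Runnable() {
      @Override
      public void run() {
        if (server.getNumOpenConnections() == 0) {
          server.shutdownServer();   // nothing is talking to us any more, safe to stop
          CLEANUP_EXECUTOR.shutdown();
        } else {
          scheduleCleanup();         // still busy, check again after another delay
        }
      }
    }, CLEANUP_DELAY_MS, TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) {
    // Toy server whose open-connection count drains to zero over a few polls.
    final AtomicInteger connections = new AtomicInteger(2);
    DeferredCleanupSketch client = new DeferredCleanupSketch(new ConnectionCounter() {
      @Override
      public int getNumOpenConnections() {
        return Math.max(0, connections.getAndDecrement());
      }
      @Override
      public void shutdownServer() {
        System.out.println("umbilical server shut down");
      }
    });
    client.close();
  }
}

Deferring the shutdown behind a small delay avoids tearing the RPC server down while a daemon still holds the umbilical connection, which is presumably the race behind the "output error" warnings the JIRA title refers to.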


[09/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query83.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query83.q.out b/ql/src/test/results/clientpositive/perf/query83.q.out
index 41e8a65..f96306b 100644
--- a/ql/src/test/results/clientpositive/perf/query83.q.out
+++ b/ql/src/test/results/clientpositive/perf/query83.q.out
@@ -131,29 +131,29 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
-Reducer 12 <- Reducer 11 (SIMPLE_EDGE)
-Reducer 14 <- Map 13 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Map 19 (SIMPLE_EDGE)
-Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Map 29 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE)
+Reducer 15 <- Map 14 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
+Reducer 16 <- Map 14 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
+Reducer 17 <- Map 14 (SIMPLE_EDGE), Reducer 26 (SIMPLE_EDGE)
+Reducer 19 <- Map 18 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 21 <- Map 20 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
-Reducer 23 <- Map 22 (SIMPLE_EDGE), Reducer 26 (SIMPLE_EDGE)
-Reducer 24 <- Reducer 23 (SIMPLE_EDGE)
-Reducer 26 <- Map 25 (SIMPLE_EDGE)
-Reducer 28 <- Map 27 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
-Reducer 29 <- Reducer 28 (SIMPLE_EDGE), Reducer 33 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 30 <- Reducer 29 (SIMPLE_EDGE)
-Reducer 33 <- Map 32 (SIMPLE_EDGE), Reducer 36 (SIMPLE_EDGE)
-Reducer 35 <- Map 34 (SIMPLE_EDGE), Reducer 38 (SIMPLE_EDGE)
-Reducer 36 <- Reducer 35 (SIMPLE_EDGE)
-Reducer 38 <- Map 37 (SIMPLE_EDGE)
+Reducer 20 <- Reducer 19 (SIMPLE_EDGE)
+Reducer 21 <- Map 18 (SIMPLE_EDGE)
+Reducer 22 <- Map 18 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 23 <- Reducer 22 (SIMPLE_EDGE)
+Reducer 24 <- Map 18 (SIMPLE_EDGE)
+Reducer 25 <- Map 18 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
+Reducer 26 <- Reducer 25 (SIMPLE_EDGE)
+Reducer 27 <- Map 18 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 15 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Reducer 18 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 5 <- Reducer 10 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 9 <- Map 8 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 8 <- Map 28 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 16 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -171,12 +171,12 @@ Stage-0
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
                 Merge Join Operator [MERGEJOIN_228] (rows=76653825 width=77)
                   Conds:RS_126._col0=RS_127._col0(Inner),RS_126._col0=RS_128._col0(Inner),Output:["_col0","_col1","_col3","_col5"]
-                <-Reducer 18 [SIMPLE_EDGE]
+                <-Reducer 10 [SIMPLE_EDGE]
                   SHUFFLE [RS_127]
                     PartitionCols:_col0
                     Group By Operator [GBY_82] (rows=34842647 width=77)
                       Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                    <-Reducer 17 [SIMPLE_EDGE]
+                    <-Reducer 9 [SIMPLE_EDGE]
                       SHUFFLE [RS_81]
                         PartitionCols:_col0
                         Group By Operator [GBY_80] (rows=69685294 width=77)
@@ -184,69 +184,46 @@ Stage-0
                           Merge Join Operator [MERGEJOIN_226] (rows=69685294 width=77)
                             Conds:RS_76._col0=RS_77._col0(Inner),Output:["_col2","_col4"]
                           <-Reducer 16 [SIMPLE_EDGE]
-                            SHUFFLE [RS_76]
-                              PartitionCols:_col0
-                              Merge Join Operator [MERGEJOIN_219] (rows=63350266 width=77)
-                                Conds:RS_73._col1=RS_74._col0(Inner),Output:["_col0","_col2","_col4"]
-                              <-Map 15 [SIMPLE_EDGE]
-                                SHUFFLE [RS_73]
-                                  PartitionCols:_col1
-                                  Select Operator [SEL_44] (rows=57591150 width=77)
-                                    Output:["_col0","_col1","_col2"]
-                                    Filter Operator [FIL_206] (rows=57591150 width=77)
-                                      predicate:(sr_item_sk is not null and sr_returned_date_sk is not null)
-                                      TableScan [TS_42] (rows=57591150 width=77)
-                                        default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_item_sk","sr_return_quantity"]
-                              <-Map 19 [SIMPLE_EDGE]
-                                SHUFFLE [RS_74]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_47] (rows=462000 width=1436)
-                                    Output:["_col0","_col1"]
-                                    Filter Operator [FIL_207] (rows=462000 width=1436)
-                                      predicate:(i_item_sk is not null and i_item_id is not null)
-                                      TableScan [TS_45] (rows=462000 width=1436)
-                                        default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                          <-Reducer 21 [SIMPLE_EDGE]
                             SHUFFLE [RS_77]
                               PartitionCols:_col0
                               Merge Join Operator [MERGEJOIN_221] (rows=80353 width=1119)
                                 Conds:RS_69._col1=RS_70._col0(Inner),Output:["_col0"]
-                              <-Map 20 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_69]
                                   PartitionCols:_col1
                                   Select Operator [SEL_50] (rows=73049 width=1119)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_208] (rows=73049 width=1119)
                                       predicate:(d_date is not null and d_date_sk is not null)
-                                      TableScan [TS_48] (rows=73049 width=1119)
+                                      TableScan [TS_6] (rows=73049 width=1119)
                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                              <-Reducer 24 [SIMPLE_EDGE]
+                              <-Reducer 23 [SIMPLE_EDGE]
                                 SHUFFLE [RS_70]
                                   PartitionCols:_col0
                                   Group By Operator [GBY_67] (rows=40176 width=1119)
                                     Output:["_col0"],keys:KEY._col0
-                                  <-Reducer 23 [SIMPLE_EDGE]
+                                  <-Reducer 22 [SIMPLE_EDGE]
                                     SHUFFLE [RS_66]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_65] (rows=80353 width=1119)
                                         Output:["_col0"],keys:_col0
                                         Merge Join Operator [MERGEJOIN_220] (rows=80353 width=1119)
                                           Conds:RS_61._col1=RS_62._col0(Inner),Output:["_col0"]
-                                        <-Map 22 [SIMPLE_EDGE]
+                                        <-Map 18 [SIMPLE_EDGE]
                                           SHUFFLE [RS_61]
                                             PartitionCols:_col1
                                             Select Operator [SEL_53] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_209] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_51] (rows=73049 width=1119)
+                                                TableScan [TS_9] (rows=73049 width=1119)
                                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                                        <-Reducer 26 [SIMPLE_EDGE]
+                                        <-Reducer 24 [SIMPLE_EDGE]
                                           SHUFFLE [RS_62]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_59] (rows=18262 width=1119)
                                               Output:["_col0"],keys:KEY._col0
-                                            <-Map 25 [SIMPLE_EDGE]
+                                            <-Map 18 [SIMPLE_EDGE]
                                               SHUFFLE [RS_58]
                                                 PartitionCols:_col0
                                                 Group By Operator [GBY_57] (rows=36525 width=1119)
@@ -255,26 +232,56 @@ Stage-0
                                                     Output:["d_week_seq"]
                                                     Filter Operator [FIL_210] (rows=36525 width=1119)
                                                       predicate:((d_date) IN ('1998-01-02', '1998-10-15', '1998-11-10') and d_week_seq is not null)
-                                                      TableScan [TS_54] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                <-Reducer 30 [SIMPLE_EDGE]
+                                                       Please refer to the previous TableScan [TS_9]
+                          <-Reducer 8 [SIMPLE_EDGE]
+                            SHUFFLE [RS_76]
+                              PartitionCols:_col0
+                              Merge Join Operator [MERGEJOIN_219] (rows=63350266 width=77)
+                                Conds:RS_73._col1=RS_74._col0(Inner),Output:["_col0","_col2","_col4"]
+                              <-Map 7 [SIMPLE_EDGE]
+                                SHUFFLE [RS_74]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_47] (rows=462000 width=1436)
+                                    Output:["_col0","_col1"]
+                                    Filter Operator [FIL_207] (rows=462000 width=1436)
+                                      predicate:(i_item_sk is not null and i_item_id is not null)
+                                      TableScan [TS_3] (rows=462000 width=1436)
+                                        default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
+                              <-Map 28 [SIMPLE_EDGE]
+                                SHUFFLE [RS_73]
+                                  PartitionCols:_col1
+                                  Select Operator [SEL_44] (rows=57591150 width=77)
+                                    Output:["_col0","_col1","_col2"]
+                                    Filter Operator [FIL_206] (rows=57591150 width=77)
+                                      predicate:(sr_item_sk is not null and sr_returned_date_sk is not null)
+                                      TableScan [TS_42] (rows=57591150 width=77)
+                                        default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_item_sk","sr_return_quantity"]
+                <-Reducer 13 [SIMPLE_EDGE]
                   SHUFFLE [RS_128]
                     PartitionCols:_col0
                     Group By Operator [GBY_124] (rows=8711072 width=92)
                       Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                    <-Reducer 29 [SIMPLE_EDGE]
+                    <-Reducer 12 [SIMPLE_EDGE]
                       SHUFFLE [RS_123]
                         PartitionCols:_col0
                         Group By Operator [GBY_122] (rows=17422145 width=92)
                           Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col4
                           Merge Join Operator [MERGEJOIN_227] (rows=17422145 width=92)
                             Conds:RS_118._col0=RS_119._col0(Inner),Output:["_col2","_col4"]
-                          <-Reducer 28 [SIMPLE_EDGE]
+                          <-Reducer 11 [SIMPLE_EDGE]
                             SHUFFLE [RS_118]
                               PartitionCols:_col0
                               Merge Join Operator [MERGEJOIN_222] (rows=15838314 width=92)
                                 Conds:RS_115._col1=RS_116._col0(Inner),Output:["_col0","_col2","_col4"]
-                              <-Map 27 [SIMPLE_EDGE]
+                              <-Map 7 [SIMPLE_EDGE]
+                                SHUFFLE [RS_116]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_89] (rows=462000 width=1436)
+                                    Output:["_col0","_col1"]
+                                    Filter Operator [FIL_212] (rows=462000 width=1436)
+                                      predicate:(i_item_sk is not null and i_item_id is not null)
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Map 29 [SIMPLE_EDGE]
                                 SHUFFLE [RS_115]
                                   PartitionCols:_col1
                                   Select Operator [SEL_86] (rows=14398467 width=92)
@@ -283,56 +290,45 @@ Stage-0
                                       predicate:(wr_item_sk is not null and wr_returned_date_sk is not null)
                                       TableScan [TS_84] (rows=14398467 width=92)
                                         default@web_returns,web_returns,Tbl:COMPLETE,Col:NONE,Output:["wr_returned_date_sk","wr_item_sk","wr_return_quantity"]
-                              <-Map 31 [SIMPLE_EDGE]
-                                SHUFFLE [RS_116]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_89] (rows=462000 width=1436)
-                                    Output:["_col0","_col1"]
-                                    Filter Operator [FIL_212] (rows=462000 width=1436)
-                                      predicate:(i_item_sk is not null and i_item_id is not null)
-                                      TableScan [TS_87] (rows=462000 width=1436)
-                                        default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                          <-Reducer 33 [SIMPLE_EDGE]
+                          <-Reducer 17 [SIMPLE_EDGE]
                             SHUFFLE [RS_119]
                               PartitionCols:_col0
                               Merge Join Operator [MERGEJOIN_224] (rows=80353 width=1119)
                                 Conds:RS_111._col1=RS_112._col0(Inner),Output:["_col0"]
-                              <-Map 32 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_111]
                                   PartitionCols:_col1
                                   Select Operator [SEL_92] (rows=73049 width=1119)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_213] (rows=73049 width=1119)
                                       predicate:(d_date is not null and d_date_sk is not null)
-                                      TableScan [TS_90] (rows=73049 width=1119)
-                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                              <-Reducer 36 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_6]
+                              <-Reducer 26 [SIMPLE_EDGE]
                                 SHUFFLE [RS_112]
                                   PartitionCols:_col0
                                   Group By Operator [GBY_109] (rows=40176 width=1119)
                                     Output:["_col0"],keys:KEY._col0
-                                  <-Reducer 35 [SIMPLE_EDGE]
+                                  <-Reducer 25 [SIMPLE_EDGE]
                                     SHUFFLE [RS_108]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_107] (rows=80353 width=1119)
                                         Output:["_col0"],keys:_col0
                                         Merge Join Operator [MERGEJOIN_223] (rows=80353 width=1119)
                                           Conds:RS_103._col1=RS_104._col0(Inner),Output:["_col0"]
-                                        <-Map 34 [SIMPLE_EDGE]
+                                        <-Map 18 [SIMPLE_EDGE]
                                           SHUFFLE [RS_103]
                                             PartitionCols:_col1
                                             Select Operator [SEL_95] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_214] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_93] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                                        <-Reducer 38 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_9]
+                                        <-Reducer 27 [SIMPLE_EDGE]
                                           SHUFFLE [RS_104]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_101] (rows=18262 width=1119)
                                               Output:["_col0"],keys:KEY._col0
-                                            <-Map 37 [SIMPLE_EDGE]
+                                            <-Map 18 [SIMPLE_EDGE]
                                               SHUFFLE [RS_100]
                                                 PartitionCols:_col0
                                                 Group By Operator [GBY_99] (rows=36525 width=1119)
@@ -341,8 +337,7 @@ Stage-0
                                                     Output:["d_week_seq"]
                                                     Filter Operator [FIL_215] (rows=36525 width=1119)
                                                       predicate:((d_date) IN ('1998-01-02', '1998-10-15', '1998-11-10') and d_week_seq is not null)
-                                                      TableScan [TS_96] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
+                                                       Please refer to the previous TableScan [TS_9]
                 <-Reducer 4 [SIMPLE_EDGE]
                   SHUFFLE [RS_126]
                     PartitionCols:_col0
@@ -355,70 +350,45 @@ Stage-0
                           Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col4
                           Merge Join Operator [MERGEJOIN_225] (rows=34846646 width=106)
                             Conds:RS_34._col0=RS_35._col0(Inner),Output:["_col2","_col4"]
-                          <-Reducer 2 [SIMPLE_EDGE]
-                            SHUFFLE [RS_34]
-                              PartitionCols:_col0
-                              Merge Join Operator [MERGEJOIN_216] (rows=31678769 width=106)
-                                Conds:RS_31._col1=RS_32._col0(Inner),Output:["_col0","_col2","_col4"]
-                              <-Map 1 [SIMPLE_EDGE]
-                                SHUFFLE [RS_31]
-                                  PartitionCols:_col1
-                                  Select Operator [SEL_2] (rows=28798881 width=106)
-                                    Output:["_col0","_col1","_col2"]
-                                    Filter Operator [FIL_201] (rows=28798881 width=106)
-                                      predicate:(cr_item_sk is not null and cr_returned_date_sk is not null)
-                                      TableScan [TS_0] (rows=28798881 width=106)
-                                        default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_returned_date_sk","cr_item_sk","cr_return_quantity"]
-                              <-Map 7 [SIMPLE_EDGE]
-                                SHUFFLE [RS_32]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_5] (rows=462000 width=1436)
-                                    Output:["_col0","_col1"]
-                                    Filter Operator [FIL_202] (rows=462000 width=1436)
-                                      predicate:(i_item_sk is not null and i_item_id is not null)
-                                      TableScan [TS_3] (rows=462000 width=1436)
-                                        default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                          <-Reducer 9 [SIMPLE_EDGE]
+                          <-Reducer 15 [SIMPLE_EDGE]
                             SHUFFLE [RS_35]
                               PartitionCols:_col0
                               Merge Join Operator [MERGEJOIN_218] (rows=80353 width=1119)
                                 Conds:RS_27._col1=RS_28._col0(Inner),Output:["_col0"]
-                              <-Map 8 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_27]
                                   PartitionCols:_col1
                                   Select Operator [SEL_8] (rows=73049 width=1119)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_203] (rows=73049 width=1119)
                                       predicate:(d_date is not null and d_date_sk is not null)
-                                      TableScan [TS_6] (rows=73049 width=1119)
-                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                              <-Reducer 12 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_6]
+                              <-Reducer 20 [SIMPLE_EDGE]
                                 SHUFFLE [RS_28]
                                   PartitionCols:_col0
                                   Group By Operator [GBY_25] (rows=40176 width=1119)
                                     Output:["_col0"],keys:KEY._col0
-                                  <-Reducer 11 [SIMPLE_EDGE]
+                                  <-Reducer 19 [SIMPLE_EDGE]
                                     SHUFFLE [RS_24]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_23] (rows=80353 width=1119)
                                         Output:["_col0"],keys:_col0
                                         Merge Join Operator [MERGEJOIN_217] (rows=80353 width=1119)
                                           Conds:RS_19._col1=RS_20._col0(Inner),Output:["_col0"]
-                                        <-Map 10 [SIMPLE_EDGE]
+                                        <-Map 18 [SIMPLE_EDGE]
                                           SHUFFLE [RS_19]
                                             PartitionCols:_col1
                                             Select Operator [SEL_11] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_204] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_9] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                                        <-Reducer 14 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_9]
+                                        <-Reducer 21 [SIMPLE_EDGE]
                                           SHUFFLE [RS_20]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_17] (rows=18262 width=1119)
                                               Output:["_col0"],keys:KEY._col0
-                                            <-Map 13 [SIMPLE_EDGE]
+                                            <-Map 18 [SIMPLE_EDGE]
                                               SHUFFLE [RS_16]
                                                 PartitionCols:_col0
                                                 Group By Operator [GBY_15] (rows=36525 width=1119)
@@ -427,6 +397,27 @@ Stage-0
                                                     Output:["d_week_seq"]
                                                     Filter Operator [FIL_205] (rows=36525 width=1119)
                                                       predicate:((d_date) IN ('1998-01-02', '1998-10-15', '1998-11-10') and d_week_seq is not null)
-                                                      TableScan [TS_12] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
+                                                       Please refer to the previous TableScan [TS_9]
+                          <-Reducer 2 [SIMPLE_EDGE]
+                            SHUFFLE [RS_34]
+                              PartitionCols:_col0
+                              Merge Join Operator [MERGEJOIN_216] (rows=31678769 width=106)
+                                Conds:RS_31._col1=RS_32._col0(Inner),Output:["_col0","_col2","_col4"]
+                              <-Map 7 [SIMPLE_EDGE]
+                                SHUFFLE [RS_32]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_5] (rows=462000 width=1436)
+                                    Output:["_col0","_col1"]
+                                    Filter Operator [FIL_202] (rows=462000 width=1436)
+                                      predicate:(i_item_sk is not null and i_item_id is not null)
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Map 1 [SIMPLE_EDGE]
+                                SHUFFLE [RS_31]
+                                  PartitionCols:_col1
+                                  Select Operator [SEL_2] (rows=28798881 width=106)
+                                    Output:["_col0","_col1","_col2"]
+                                    Filter Operator [FIL_201] (rows=28798881 width=106)
+                                      predicate:(cr_item_sk is not null and cr_returned_date_sk is not null)
+                                      TableScan [TS_0] (rows=28798881 width=106)
+                                        default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_returned_date_sk","cr_item_sk","cr_return_quantity"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query85.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query85.q.out b/ql/src/test/results/clientpositive/perf/query85.q.out
index 168bcd2..ba8659e 100644
--- a/ql/src/test/results/clientpositive/perf/query85.q.out
+++ b/ql/src/test/results/clientpositive/perf/query85.q.out
@@ -12,7 +12,7 @@ Reducer 4 <- Map 13 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Map 14 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Map 15 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Map 16 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-Reducer 8 <- Map 17 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 8 <- Map 16 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
@@ -38,15 +38,15 @@ Stage-0
                       Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(_col5)","avg(_col17)","avg(_col16)"],keys:_col19
                       Merge Join Operator [MERGEJOIN_105] (rows=2047980 width=385)
                         Conds:RS_44._col13, _col24, _col25=RS_45._col0, _col1, _col2(Inner),Output:["_col5","_col16","_col17","_col19"]
-                      <-Map 17 [SIMPLE_EDGE]
+                      <-Map 16 [SIMPLE_EDGE]
                         SHUFFLE [RS_45]
                           PartitionCols:_col0, _col1, _col2
                           Select Operator [SEL_23] (rows=1861800 width=385)
                             Output:["_col0","_col1","_col2"]
                             Filter Operator [FIL_98] (rows=1861800 width=385)
                               predicate:(((cd_education_status = '4 yr Degree') or (cd_education_status = 'Primary') or (cd_education_status = 'Advanced Degree')) and ((cd_marital_status = 'M') or (cd_marital_status = 'D') or (cd_marital_status = 'U')) and cd_demo_sk is not null and cd_marital_status is not null and cd_education_status is not null)
-                              TableScan [TS_21] (rows=1861800 width=385)
-                                default@customer_demographics,cd2,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status","cd_education_status"]
+                              TableScan [TS_18] (rows=1861800 width=385)
+                                default@customer_demographics,cd1,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status","cd_education_status"]
                       <-Reducer 7 [SIMPLE_EDGE]
                         SHUFFLE [RS_44]
                           PartitionCols:_col13, _col24, _col25
@@ -61,8 +61,7 @@ Stage-0
                                   Output:["_col0","_col1","_col2"]
                                   Filter Operator [FIL_97] (rows=1861800 width=385)
                                     predicate:(((cd_education_status = '4 yr Degree') or (cd_education_status = 'Primary') or (cd_education_status = 'Advanced Degree')) and ((cd_marital_status = 'M') or (cd_marital_status = 'D') or (cd_marital_status = 'U')) and cd_demo_sk is not null and cd_marital_status is not null and cd_education_status is not null)
-                                    TableScan [TS_18] (rows=1861800 width=385)
-                                      default@customer_demographics,cd1,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status","cd_education_status"]
+                                     Please refer to the previous TableScan [TS_18]
                             <-Reducer 6 [SIMPLE_EDGE]
                               SHUFFLE [RS_40]
                                 PartitionCols:_col11

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query87.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query87.q.out b/ql/src/test/results/clientpositive/perf/query87.q.out
index c7dd1d9..58a33d9 100644
--- a/ql/src/test/results/clientpositive/perf/query87.q.out
+++ b/ql/src/test/results/clientpositive/perf/query87.q.out
@@ -43,14 +43,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 13 <- Map 12 (SIMPLE_EDGE), Map 16 (SIMPLE_EDGE)
-Reducer 14 <- Map 17 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Reducer 14 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 22 (SIMPLE_EDGE)
+Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 18 (SIMPLE_EDGE)
+Reducer 12 <- Map 17 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 14 <- Map 10 (SIMPLE_EDGE), Map 19 (SIMPLE_EDGE)
+Reducer 15 <- Map 17 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+Reducer 16 <- Reducer 15 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 10 (SIMPLE_EDGE)
-Reducer 20 <- Map 23 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
-Reducer 21 <- Reducer 20 (SIMPLE_EDGE), Union 7 (CONTAINS)
-Reducer 3 <- Map 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Map 17 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 8 <- Union 7 (SIMPLE_EDGE)
@@ -76,7 +76,7 @@ Stage-0
                     Group By Operator [GBY_96] (rows=27225312 width=129)
                       Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2
                     <-Union 7 [SIMPLE_EDGE]
-                      <-Reducer 21 [CONTAINS]
+                      <-Reducer 16 [CONTAINS]
                         Reduce Output Operator [RS_95]
                           PartitionCols:_col0, _col1, _col2
                           Group By Operator [GBY_94] (rows=54450625 width=129)
@@ -89,28 +89,37 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
                                   Group By Operator [GBY_83] (rows=87121617 width=135)
                                     Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2
-                                  <-Reducer 20 [SIMPLE_EDGE]
+                                  <-Reducer 15 [SIMPLE_EDGE]
                                     SHUFFLE [RS_82]
                                       PartitionCols:_col0, _col1, _col2
                                       Group By Operator [GBY_81] (rows=174243235 width=135)
                                         Output:["_col0","_col1","_col2"],keys:_col7, _col6, _col3
                                         Merge Join Operator [MERGEJOIN_135] (rows=174243235 width=135)
                                           Conds:RS_77._col1=RS_78._col0(Inner),Output:["_col3","_col6","_col7"]
-                                        <-Map 23 [SIMPLE_EDGE]
+                                        <-Map 17 [SIMPLE_EDGE]
                                           SHUFFLE [RS_78]
                                             PartitionCols:_col0
                                             Select Operator [SEL_73] (rows=80000000 width=860)
                                               Output:["_col0","_col1","_col2"]
                                               Filter Operator [FIL_128] (rows=80000000 width=860)
                                                 predicate:c_customer_sk is not null
-                                                TableScan [TS_71] (rows=80000000 width=860)
+                                                TableScan [TS_6] (rows=80000000 width=860)
                                                   default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
-                                        <-Reducer 19 [SIMPLE_EDGE]
+                                        <-Reducer 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_77]
                                             PartitionCols:_col1
                                             Merge Join Operator [MERGEJOIN_134] (rows=158402938 width=135)
                                               Conds:RS_74._col0=RS_75._col0(Inner),Output:["_col1","_col3"]
-                                            <-Map 18 [SIMPLE_EDGE]
+                                            <-Map 10 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_75]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_70] (rows=8116 width=1119)
+                                                  Output:["_col0","_col1"]
+                                                  Filter Operator [FIL_127] (rows=8116 width=1119)
+                                                    predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                    TableScan [TS_3] (rows=73049 width=1119)
+                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
+                                            <-Map 19 [SIMPLE_EDGE]
                                               SHUFFLE [RS_74]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_67] (rows=144002668 width=135)
@@ -119,15 +128,6 @@ Stage-0
                                                     predicate:(ws_sold_date_sk is not null and ws_bill_customer_sk is not null)
                                                     TableScan [TS_65] (rows=144002668 width=135)
                                                       default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_customer_sk"]
-                                            <-Map 22 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_75]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_70] (rows=8116 width=1119)
-                                                  Output:["_col0","_col1"]
-                                                  Filter Operator [FIL_127] (rows=8116 width=1119)
-                                                    predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                    TableScan [TS_68] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
                       <-Reducer 6 [CONTAINS]
                         Reduce Output Operator [RS_95]
                           PartitionCols:_col0, _col1, _col2
@@ -146,7 +146,7 @@ Stage-0
                                       Group By Operator [GBY_57] (rows=130677808 width=103)
                                         Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2
                                       <-Union 5 [SIMPLE_EDGE]
-                                        <-Reducer 15 [CONTAINS]
+                                        <-Reducer 13 [CONTAINS]
                                           Reduce Output Operator [RS_56]
                                             PartitionCols:_col0, _col1, _col2
                                             Group By Operator [GBY_55] (rows=261355616 width=103)
@@ -159,7 +159,7 @@ Stage-0
                                                     Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
                                                     Group By Operator [GBY_44] (rows=174233858 width=135)
                                                       Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2
-                                                    <-Reducer 14 [SIMPLE_EDGE]
+                                                    <-Reducer 12 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_43]
                                                         PartitionCols:_col0, _col1, _col2
                                                         Group By Operator [GBY_42] (rows=348467716 width=135)
@@ -173,14 +173,21 @@ Stage-0
                                                                 Output:["_col0","_col1","_col2"]
                                                                 Filter Operator [FIL_125] (rows=80000000 width=860)
                                                                   predicate:c_customer_sk is not null
-                                                                  TableScan [TS_32] (rows=80000000 width=860)
-                                                                    default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
-                                                          <-Reducer 13 [SIMPLE_EDGE]
+                                                                   Please refer to the previous TableScan [TS_6]
+                                                          <-Reducer 11 [SIMPLE_EDGE]
                                                             SHUFFLE [RS_38]
                                                               PartitionCols:_col1
                                                               Merge Join Operator [MERGEJOIN_132] (rows=316788826 width=135)
                                                                 Conds:RS_35._col0=RS_36._col0(Inner),Output:["_col1","_col3"]
-                                                              <-Map 12 [SIMPLE_EDGE]
+                                                              <-Map 10 [SIMPLE_EDGE]
+                                                                SHUFFLE [RS_36]
+                                                                  PartitionCols:_col0
+                                                                  Select Operator [SEL_31] (rows=8116 width=1119)
+                                                                    Output:["_col0","_col1"]
+                                                                    Filter Operator [FIL_124] (rows=8116 width=1119)
+                                                                      predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                                       Please refer to the previous TableScan [TS_3]
+                                                              <-Map 18 [SIMPLE_EDGE]
                                                                 SHUFFLE [RS_35]
                                                                   PartitionCols:_col0
                                                                   Select Operator [SEL_28] (rows=287989836 width=135)
@@ -189,15 +196,6 @@ Stage-0
                                                                       predicate:(cs_sold_date_sk is not null and cs_bill_customer_sk is not null)
                                                                       TableScan [TS_26] (rows=287989836 width=135)
                                                                         default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_customer_sk"]
-                                                              <-Map 16 [SIMPLE_EDGE]
-                                                                SHUFFLE [RS_36]
-                                                                  PartitionCols:_col0
-                                                                  Select Operator [SEL_31] (rows=8116 width=1119)
-                                                                    Output:["_col0","_col1"]
-                                                                    Filter Operator [FIL_124] (rows=8116 width=1119)
-                                                                      predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                                      TableScan [TS_29] (rows=73049 width=1119)
-                                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
                                         <-Reducer 4 [CONTAINS]
                                           Reduce Output Operator [RS_56]
                                             PartitionCols:_col0, _col1, _col2
@@ -218,20 +216,27 @@ Stage-0
                                                           Output:["_col0","_col1","_col2"],keys:_col7, _col6, _col3
                                                           Merge Join Operator [MERGEJOIN_131] (rows=696954748 width=88)
                                                             Conds:RS_12._col1=RS_13._col0(Inner),Output:["_col3","_col6","_col7"]
-                                                          <-Map 11 [SIMPLE_EDGE]
+                                                          <-Map 17 [SIMPLE_EDGE]
                                                             SHUFFLE [RS_13]
                                                               PartitionCols:_col0
                                                               Select Operator [SEL_8] (rows=80000000 width=860)
                                                                 Output:["_col0","_col1","_col2"]
                                                                 Filter Operator [FIL_122] (rows=80000000 width=860)
                                                                   predicate:c_customer_sk is not null
-                                                                  TableScan [TS_6] (rows=80000000 width=860)
-                                                                    default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
+                                                                   Please refer to the previous TableScan [TS_6]
                                                           <-Reducer 2 [SIMPLE_EDGE]
                                                             SHUFFLE [RS_12]
                                                               PartitionCols:_col1
                                                               Merge Join Operator [MERGEJOIN_130] (rows=633595212 width=88)
                                                                 Conds:RS_9._col0=RS_10._col0(Inner),Output:["_col1","_col3"]
+                                                              <-Map 10 [SIMPLE_EDGE]
+                                                                SHUFFLE [RS_10]
+                                                                  PartitionCols:_col0
+                                                                  Select Operator [SEL_5] (rows=8116 width=1119)
+                                                                    Output:["_col0","_col1"]
+                                                                    Filter Operator [FIL_121] (rows=8116 width=1119)
+                                                                      predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                                       Please refer to the previous TableScan [TS_3]
                                                               <-Map 1 [SIMPLE_EDGE]
                                                                 SHUFFLE [RS_9]
                                                                   PartitionCols:_col0
@@ -241,13 +246,4 @@ Stage-0
                                                                       predicate:(ss_sold_date_sk is not null and ss_customer_sk is not null)
                                                                       TableScan [TS_0] (rows=575995635 width=88)
                                                                         default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
-                                                              <-Map 10 [SIMPLE_EDGE]
-                                                                SHUFFLE [RS_10]
-                                                                  PartitionCols:_col0
-                                                                  Select Operator [SEL_5] (rows=8116 width=1119)
-                                                                    Output:["_col0","_col1"]
-                                                                    Filter Operator [FIL_121] (rows=8116 width=1119)
-                                                                      predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                                      TableScan [TS_3] (rows=73049 width=1119)
-                                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
 


[04/50] [abbrv] hive git commit: HIVE-16651: LlapProtocolClientProxy stack trace when using llap input format (Jason Dere, reviewed by Siddharth Seth)

Posted by we...@apache.org.
HIVE-16651: LlapProtocolClientProxy stack trace when using llap input format (Jason Dere, reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fea6df6b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fea6df6b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fea6df6b

Branch: refs/heads/hive-14535
Commit: fea6df6bcca6fcd76b95257d838e6d741dcf3f2b
Parents: 3baaca7
Author: Jason Dere <jd...@hortonworks.com>
Authored: Fri May 12 16:39:50 2017 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Fri May 12 16:39:50 2017 -0700

----------------------------------------------------------------------
 .../test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java    | 1 +
 .../apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java    | 5 ++++-
 2 files changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/fea6df6b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
index 4cc9045..606a263 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlap.java
@@ -425,6 +425,7 @@ public class TestJdbcWithMiniLlap {
         rowProcessor.process(row);
         ++rowCount;
       }
+      reader.close();
     }
 
     return rowCount;
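
The one-line fix above just closes the per-split reader once its rows have been drained. In ordinary client code the same guarantee usually comes from try-with-resources; the following is a generic, self-contained sketch of that idiom (BufferedReader stands in for the LLAP row reader, which is not shown here):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    class CloseReaderExample {
      // Counts lines, guaranteeing the reader is closed even if processing throws.
      static int countRows(String data) throws IOException {
        int rowCount = 0;
        try (BufferedReader reader = new BufferedReader(new StringReader(data))) {
          while (reader.readLine() != null) {
            ++rowCount;
          }
        }                         // reader.close() runs here, on success or failure
        return rowCount;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(countRows("a\nb\nc"));   // prints 3
      }
    }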

http://git-wip-us.apache.org/repos/asf/hive/blob/fea6df6b/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java b/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
index ce75d72..5df19d3 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/tez/LlapProtocolClientProxy.java
@@ -24,6 +24,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.CancellationException;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutorService;
@@ -129,7 +130,9 @@ public class LlapProtocolClientProxy extends AbstractService {
 
       @Override
       public void onFailure(Throwable t) {
-        LOG.warn("RequestManager shutdown with error", t);
+        if (!(t instanceof CancellationException)) {
+          LOG.warn("RequestManager shutdown with error", t);
+        }
       }
     });
   }
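
The guard above follows the usual Guava FutureCallback pattern: a deliberate cancel() surfaces in onFailure as a CancellationException, which is an expected shutdown signal and not worth a warning. Below is a minimal, self-contained sketch of that pattern; the executor, task, and logging are illustrative stand-ins, not the Hive request manager itself:

    import java.util.concurrent.CancellationException;
    import java.util.concurrent.Executors;

    import com.google.common.util.concurrent.FutureCallback;
    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;
    import com.google.common.util.concurrent.ListeningExecutorService;
    import com.google.common.util.concurrent.MoreExecutors;

    public class CancellationAwareCallback {
      public static void main(String[] args) {
        ListeningExecutorService pool =
            MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
        ListenableFuture<Void> task = pool.submit(() -> {
          Thread.sleep(10_000);            // long-running work that will be cancelled
          return null;
        });
        Futures.addCallback(task, new FutureCallback<Void>() {
          @Override
          public void onSuccess(Void result) {
            System.out.println("task finished normally");
          }
          @Override
          public void onFailure(Throwable t) {
            // Cancellation is expected during shutdown; only report real failures.
            if (!(t instanceof CancellationException)) {
              System.err.println("task failed unexpectedly: " + t);
            }
          }
        }, MoreExecutors.directExecutor());
        task.cancel(true);                 // onFailure fires with CancellationException and stays quiet
        pool.shutdownNow();
      }
    }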


[49/50] [abbrv] hive git commit: HIVE-16660: Not able to add partition for views in hive when sentry is enabled (Yongzhi Chen, reviewed by Aihua Xu)

Posted by we...@apache.org.
HIVE-16660: Not able to add partition for views in hive when sentry is enabled (Yongzhi Chen, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6e0c52e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6e0c52e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6e0c52e6

Branch: refs/heads/hive-14535
Commit: 6e0c52e6883cb55de3b8557504c13f3c8dfb619f
Parents: 2bcbd29
Author: Yongzhi Chen <yc...@apache.org>
Authored: Fri May 12 10:06:04 2017 -0400
Committer: Yongzhi Chen <yc...@apache.org>
Committed: Tue May 16 13:53:33 2017 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/metadata/Partition.java     | 10 ++++++++--
 .../apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  4 +++-
 2 files changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6e0c52e6/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index bff1688..75179ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -213,9 +213,15 @@ public class Partition implements Serializable {
 
   public Path getDataLocation() {
     if (table.isPartitioned()) {
-      return new Path(tPartition.getSd().getLocation());
+      if (tPartition.getSd() == null)
+        return null;
+      else
+        return new Path(tPartition.getSd().getLocation());
     } else {
-      return new Path(table.getTTable().getSd().getLocation());
+      if (table.getTTable() == null || table.getTTable().getSd() == null)
+        return null;
+      else
+        return new Path(table.getTTable().getSd().getLocation());
     }
   }
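
The change above is a plain null guard for the case the JIRA describes, where a view (or a view partition) may carry no storage descriptor: getDataLocation now returns null instead of dereferencing it. Reduced to its essence, with a plain string standing in for the Thrift StorageDescriptor and the Hadoop Path constructor as the only real dependency:

    import org.apache.hadoop.fs.Path;

    class DataLocationSketch {
      // Returns a Path for the location, or null when no storage descriptor/location exists.
      static Path dataLocation(String sdLocation) {
        return sdLocation == null ? null : new Path(sdLocation);
      }

      public static void main(String[] args) {
        System.out.println(dataLocation("/warehouse/db/tbl/ds=1"));  // /warehouse/db/tbl/ds=1
        System.out.println(dataLocation(null));                      // null (view case)
      }
    }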
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6e0c52e6/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 77bc12c..7601267 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2934,7 +2934,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         }
         cmd.append(")");
       }
-      Driver driver = new Driver(conf);
+      SessionState ss = SessionState.get();
+      String uName = (ss == null? null: ss.getUserName());
+      Driver driver = new Driver(conf, uName);
       int rc = driver.compile(cmd.toString(), false);
       if (rc != 0) {
         throw new SemanticException(ErrorMsg.NO_VALID_PARTN.getMsg());


[32/50] [abbrv] hive git commit: HIVE-1010: Implement INFORMATION_SCHEMA in Hive (Gunther Hagleitner, reviewed by Thejas Nair)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/test/results/clientpositive/llap/sysdb.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/sysdb.q.out b/ql/src/test/results/clientpositive/llap/sysdb.q.out
new file mode 100644
index 0000000..0ddc373
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/sysdb.q.out
@@ -0,0 +1,3447 @@
+PREHOOK: query: create table src_buck (key int, value string) clustered by(value) into 2 buckets
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_buck
+POSTHOOK: query: create table src_buck (key int, value string) clustered by(value) into 2 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_buck
+PREHOOK: query: create table src_skew (key int) skewed by (key) on (1,2,3)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_skew
+POSTHOOK: query: create table src_skew (key int) skewed by (key) on (1,2,3)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_skew
+PREHOOK: query: CREATE TABLE scr_txn (key int, value string)
+    CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC
+    TBLPROPERTIES (
+      "transactional"="true",
+      "compactor.mapreduce.map.memory.mb"="2048",
+      "compactorthreshold.hive.compactor.delta.num.threshold"="4",
+      "compactorthreshold.hive.compactor.delta.pct.threshold"="0.5")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@scr_txn
+POSTHOOK: query: CREATE TABLE scr_txn (key int, value string)
+    CLUSTERED BY (key) INTO 2 BUCKETS STORED AS ORC
+    TBLPROPERTIES (
+      "transactional"="true",
+      "compactor.mapreduce.map.memory.mb"="2048",
+      "compactorthreshold.hive.compactor.delta.num.threshold"="4",
+      "compactorthreshold.hive.compactor.delta.pct.threshold"="0.5")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@scr_txn
+PREHOOK: query: CREATE TEMPORARY TABLE src_tmp (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_tmp
+POSTHOOK: query: CREATE TEMPORARY TABLE src_tmp (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_tmp
+PREHOOK: query: CREATE TABLE moretypes (a decimal(10,2), b tinyint, c smallint, d int, e bigint, f varchar(10), g char(3))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@moretypes
+POSTHOOK: query: CREATE TABLE moretypes (a decimal(10,2), b tinyint, c smallint, d int, e bigint, f varchar(10), g char(3))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@moretypes
+PREHOOK: query: show grant user hive_test_user
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user
+POSTHOOK: type: SHOW_GRANT
+default	alltypesorc			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	alltypesorc			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	alltypesorc			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	alltypesorc			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	cbo_t1			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	cbo_t1			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	cbo_t1			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	cbo_t1			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	cbo_t2			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	cbo_t2			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	cbo_t2			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	cbo_t2			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	cbo_t3			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	cbo_t3			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	cbo_t3			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	cbo_t3			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	lineitem			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	lineitem			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	lineitem			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	lineitem			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	moretypes			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	moretypes			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	moretypes			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	moretypes			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	part			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	part			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	part			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	part			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	scr_txn			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	scr_txn			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	scr_txn			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	scr_txn			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src1			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src1			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src1			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src1			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_buck			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_buck			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_buck			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_buck			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_cbo			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_cbo			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_cbo			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_cbo			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_json			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_json			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_json			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_json			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_sequencefile			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_sequencefile			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_sequencefile			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_sequencefile			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_skew			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_skew			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_skew			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_skew			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	src_thrift			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	src_thrift			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	src_thrift			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	src_thrift			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	srcbucket			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	srcbucket			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	srcbucket			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	srcbucket			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	srcbucket2			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	srcbucket2			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	srcbucket2			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	srcbucket2			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+default	srcpart			hive_test_user	USER	DELETE	true	-1	hive_test_user
+default	srcpart			hive_test_user	USER	INSERT	true	-1	hive_test_user
+default	srcpart			hive_test_user	USER	SELECT	true	-1	hive_test_user
+default	srcpart			hive_test_user	USER	UPDATE	true	-1	hive_test_user
+PREHOOK: query: CREATE DATABASE SYS
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:SYS
+POSTHOOK: query: CREATE DATABASE SYS
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:SYS
+PREHOOK: query: USE SYS
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:sys
+POSTHOOK: query: USE SYS
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` (
+  `SD_ID` bigint,
+  `BUCKET_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_BUCKETING_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"BUCKET_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"BUCKETING_COLS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@BUCKETING_COLS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` (
+  `SD_ID` bigint,
+  `BUCKET_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_BUCKETING_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"BUCKET_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"BUCKETING_COLS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@BUCKETING_COLS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `CDS` (
+  `CD_ID` bigint,
+  CONSTRAINT `SYS_PK_CDS` PRIMARY KEY (`CD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\"
+FROM
+  \"CDS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@CDS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `CDS` (
+  `CD_ID` bigint,
+  CONSTRAINT `SYS_PK_CDS` PRIMARY KEY (`CD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\"
+FROM
+  \"CDS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@CDS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `COLUMNS_V2` (
+  `CD_ID` bigint,
+  `COMMENT` string,
+  `COLUMN_NAME` string,
+  `TYPE_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_COLUMN_V2` PRIMARY KEY (`CD_ID`,`COLUMN_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\",
+  \"COMMENT\",
+  \"COLUMN_NAME\",
+  \"TYPE_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"COLUMNS_V2\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@COLUMNS_V2
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `COLUMNS_V2` (
+  `CD_ID` bigint,
+  `COMMENT` string,
+  `COLUMN_NAME` string,
+  `TYPE_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_COLUMN_V2` PRIMARY KEY (`CD_ID`,`COLUMN_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\",
+  \"COMMENT\",
+  \"COLUMN_NAME\",
+  \"TYPE_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"COLUMNS_V2\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@COLUMNS_V2
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
+  `DB_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_DATABASE_PARAMS` PRIMARY KEY (`DB_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"DATABASE_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@DATABASE_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
+  `DB_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_DATABASE_PARAMS` PRIMARY KEY (`DB_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"DATABASE_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@DATABASE_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `DBS` (
+  `DB_ID` bigint,
+  `DB_LOCATION_URI` string,
+  `NAME` string,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_DBS` PRIMARY KEY (`DB_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"DB_LOCATION_URI\",
+  \"NAME\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  DBS"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@DBS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `DBS` (
+  `DB_ID` bigint,
+  `DB_LOCATION_URI` string,
+  `NAME` string,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_DBS` PRIMARY KEY (`DB_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"DB_LOCATION_URI\",
+  \"NAME\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  DBS"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@DBS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
+  `DB_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `DB_PRIV` string,
+  CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"DB_PRIV\"
+FROM
+  \"DB_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@DB_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
+  `DB_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `DB_PRIV` string,
+  CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"DB_PRIV\"
+FROM
+  \"DB_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@DB_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
+  `USER_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` string,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `USER_PRIV` string,
+  CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"USER_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"USER_PRIV\"
+FROM
+  \"GLOBAL_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@GLOBAL_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
+  `USER_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` string,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `USER_PRIV` string,
+  CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"USER_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"USER_PRIV\"
+FROM
+  \"GLOBAL_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@GLOBAL_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `IDXS` (
+  `INDEX_ID` bigint,
+  `CREATE_TIME` int,
+  `DEFERRED_REBUILD` boolean,
+  `INDEX_HANDLER_CLASS` string,
+  `INDEX_NAME` string,
+  `INDEX_TBL_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `ORIG_TBL_ID` bigint,
+  `SD_ID` bigint,
+  CONSTRAINT `SYS_PK_IDXS` PRIMARY KEY (`INDEX_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"CREATE_TIME\",
+  \"DEFERRED_REBUILD\",
+  \"INDEX_HANDLER_CLASS\",
+  \"INDEX_NAME\",
+  \"INDEX_TBL_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"ORIG_TBL_ID\",
+  \"SD_ID\"
+FROM
+  \"IDXS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@IDXS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `IDXS` (
+  `INDEX_ID` bigint,
+  `CREATE_TIME` int,
+  `DEFERRED_REBUILD` boolean,
+  `INDEX_HANDLER_CLASS` string,
+  `INDEX_NAME` string,
+  `INDEX_TBL_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `ORIG_TBL_ID` bigint,
+  `SD_ID` bigint,
+  CONSTRAINT `SYS_PK_IDXS` PRIMARY KEY (`INDEX_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"CREATE_TIME\",
+  \"DEFERRED_REBUILD\",
+  \"INDEX_HANDLER_CLASS\",
+  \"INDEX_NAME\",
+  \"INDEX_TBL_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"ORIG_TBL_ID\",
+  \"SD_ID\"
+FROM
+  \"IDXS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@IDXS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `INDEX_PARAMS` (
+  `INDEX_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_INDEX_PARAMS` PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"INDEX_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@INDEX_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `INDEX_PARAMS` (
+  `INDEX_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_INDEX_PARAMS` PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"INDEX_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@INDEX_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITIONS` (
+  `PART_ID` bigint,
+  `CREATE_TIME` int,
+  `LAST_ACCESS_TIME` int,
+  `PART_NAME` string,
+  `SD_ID` bigint,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_PARTITIONS` PRIMARY KEY (`PART_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"CREATE_TIME\",
+  \"LAST_ACCESS_TIME\",
+  \"PART_NAME\",
+  \"SD_ID\",
+  \"TBL_ID\"
+FROM
+  \"PARTITIONS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PARTITIONS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITIONS` (
+  `PART_ID` bigint,
+  `CREATE_TIME` int,
+  `LAST_ACCESS_TIME` int,
+  `PART_NAME` string,
+  `SD_ID` bigint,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_PARTITIONS` PRIMARY KEY (`PART_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"CREATE_TIME\",
+  \"LAST_ACCESS_TIME\",
+  \"PART_NAME\",
+  \"SD_ID\",
+  \"TBL_ID\"
+FROM
+  \"PARTITIONS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PARTITIONS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_KEYS` (
+  `TBL_ID` bigint,
+  `PKEY_COMMENT` string,
+  `PKEY_NAME` string,
+  `PKEY_TYPE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEYS` PRIMARY KEY (`TBL_ID`,`PKEY_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PKEY_COMMENT\",
+  \"PKEY_NAME\",
+  \"PKEY_TYPE\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEYS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PARTITION_KEYS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_KEYS` (
+  `TBL_ID` bigint,
+  `PKEY_COMMENT` string,
+  `PKEY_NAME` string,
+  `PKEY_TYPE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEYS` PRIMARY KEY (`TBL_ID`,`PKEY_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PKEY_COMMENT\",
+  \"PKEY_NAME\",
+  \"PKEY_TYPE\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEYS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PARTITION_KEYS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
+  `PART_ID` bigint,
+  `PART_KEY_VAL` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEY_VALS` PRIMARY KEY (`PART_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PART_KEY_VAL\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEY_VALS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PARTITION_KEY_VALS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
+  `PART_ID` bigint,
+  `PART_KEY_VAL` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEY_VALS` PRIMARY KEY (`PART_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PART_KEY_VAL\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEY_VALS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PARTITION_KEY_VALS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_PARAMS` (
+  `PART_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_PARTITION_PARAMS` PRIMARY KEY (`PART_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"PARTITION_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PARTITION_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PARTITION_PARAMS` (
+  `PART_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_PARTITION_PARAMS` PRIMARY KEY (`PART_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"PARTITION_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PARTITION_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
+  `PART_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_COL_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_COL_PRIV\"
+FROM
+  \"PART_COL_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PART_COL_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
+  `PART_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_COL_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_COL_PRIV\"
+FROM
+  \"PART_COL_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PART_COL_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
+  `PART_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_PRIV\"
+FROM
+  \"PART_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PART_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
+  `PART_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_PRIV\"
+FROM
+  \"PART_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PART_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `ROLES` (
+  `ROLE_ID` bigint,
+  `CREATE_TIME` int,
+  `OWNER_NAME` string,
+  `ROLE_NAME` string,
+  CONSTRAINT `SYS_PK_ROLES` PRIMARY KEY (`ROLE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_ID\",
+  \"CREATE_TIME\",
+  \"OWNER_NAME\",
+  \"ROLE_NAME\"
+FROM
+  \"ROLES\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@ROLES
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `ROLES` (
+  `ROLE_ID` bigint,
+  `CREATE_TIME` int,
+  `OWNER_NAME` string,
+  `ROLE_NAME` string,
+  CONSTRAINT `SYS_PK_ROLES` PRIMARY KEY (`ROLE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_ID\",
+  \"CREATE_TIME\",
+  \"OWNER_NAME\",
+  \"ROLE_NAME\"
+FROM
+  \"ROLES\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@ROLES
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `ROLE_MAP` (
+  `ROLE_GRANT_ID` bigint,
+  `ADD_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `ROLE_ID` bigint,
+  CONSTRAINT `SYS_PK_ROLE_MAP` PRIMARY KEY (`ROLE_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_GRANT_ID\",
+  \"ADD_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"ROLE_ID\"
+FROM
+  \"ROLE_MAP\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@ROLE_MAP
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `ROLE_MAP` (
+  `ROLE_GRANT_ID` bigint,
+  `ADD_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `ROLE_ID` bigint,
+  CONSTRAINT `SYS_PK_ROLE_MAP` PRIMARY KEY (`ROLE_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_GRANT_ID\",
+  \"ADD_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"ROLE_ID\"
+FROM
+  \"ROLE_MAP\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@ROLE_MAP
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SDS` (
+  `SD_ID` bigint,
+  `CD_ID` bigint,
+  `INPUT_FORMAT` string,
+  `IS_COMPRESSED` boolean,
+  `IS_STOREDASSUBDIRECTORIES` boolean,
+  `LOCATION` string,
+  `NUM_BUCKETS` int,
+  `OUTPUT_FORMAT` string,
+  `SERDE_ID` bigint,
+  CONSTRAINT `SYS_PK_SDS` PRIMARY KEY (`SD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"CD_ID\",
+  \"INPUT_FORMAT\",
+  \"IS_COMPRESSED\",
+  \"IS_STOREDASSUBDIRECTORIES\",
+  \"LOCATION\",
+  \"NUM_BUCKETS\",
+  \"OUTPUT_FORMAT\",
+  \"SERDE_ID\"
+FROM
+  \"SDS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SDS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SDS` (
+  `SD_ID` bigint,
+  `CD_ID` bigint,
+  `INPUT_FORMAT` string,
+  `IS_COMPRESSED` boolean,
+  `IS_STOREDASSUBDIRECTORIES` boolean,
+  `LOCATION` string,
+  `NUM_BUCKETS` int,
+  `OUTPUT_FORMAT` string,
+  `SERDE_ID` bigint,
+  CONSTRAINT `SYS_PK_SDS` PRIMARY KEY (`SD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"CD_ID\",
+  \"INPUT_FORMAT\",
+  \"IS_COMPRESSED\",
+  \"IS_STOREDASSUBDIRECTORIES\",
+  \"LOCATION\",
+  \"NUM_BUCKETS\",
+  \"OUTPUT_FORMAT\",
+  \"SERDE_ID\"
+FROM
+  \"SDS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SDS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SD_PARAMS` (
+  `SD_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SD_PARAMS` PRIMARY KEY (`SD_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SD_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SD_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SD_PARAMS` (
+  `SD_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SD_PARAMS` PRIMARY KEY (`SD_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SD_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SD_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
+  `SEQUENCE_NAME` string,
+  `NEXT_VAL` bigint,
+  CONSTRAINT `SYS_PK_SEQUENCE_TABLE` PRIMARY KEY (`SEQUENCE_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SEQUENCE_NAME\",
+  \"NEXT_VAL\"
+FROM
+  \"SEQUENCE_TABLE\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SEQUENCE_TABLE
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
+  `SEQUENCE_NAME` string,
+  `NEXT_VAL` bigint,
+  CONSTRAINT `SYS_PK_SEQUENCE_TABLE` PRIMARY KEY (`SEQUENCE_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SEQUENCE_NAME\",
+  \"NEXT_VAL\"
+FROM
+  \"SEQUENCE_TABLE\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SEQUENCE_TABLE
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SERDES` (
+  `SERDE_ID` bigint,
+  `NAME` string,
+  `SLIB` string,
+  CONSTRAINT `SYS_PK_SERDES` PRIMARY KEY (`SERDE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"NAME\",
+  \"SLIB\"
+FROM
+  \"SERDES\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SERDES
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SERDES` (
+  `SERDE_ID` bigint,
+  `NAME` string,
+  `SLIB` string,
+  CONSTRAINT `SYS_PK_SERDES` PRIMARY KEY (`SERDE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"NAME\",
+  \"SLIB\"
+FROM
+  \"SERDES\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SERDES
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SERDE_PARAMS` (
+  `SERDE_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SERDE_PARAMS` PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SERDE_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SERDE_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SERDE_PARAMS` (
+  `SERDE_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SERDE_PARAMS` PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SERDE_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SERDE_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
+  `SD_ID` bigint,
+  `SKEWED_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_COL_NAMES` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"SKEWED_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_COL_NAMES\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SKEWED_COL_NAMES
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
+  `SD_ID` bigint,
+  `SKEWED_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_COL_NAMES` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"SKEWED_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_COL_NAMES\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SKEWED_COL_NAMES
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
+  `SD_ID` bigint,
+  `STRING_LIST_ID_KID` bigint,
+  `LOCATION` string,
+  CONSTRAINT `SYS_PK_COL_VALUE_LOC_MAP` PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"STRING_LIST_ID_KID\",
+  \"LOCATION\"
+FROM
+  \"SKEWED_COL_VALUE_LOC_MAP\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SKEWED_COL_VALUE_LOC_MAP
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
+  `SD_ID` bigint,
+  `STRING_LIST_ID_KID` bigint,
+  `LOCATION` string,
+  CONSTRAINT `SYS_PK_COL_VALUE_LOC_MAP` PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"STRING_LIST_ID_KID\",
+  \"LOCATION\"
+FROM
+  \"SKEWED_COL_VALUE_LOC_MAP\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SKEWED_COL_VALUE_LOC_MAP
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
+  `STRING_LIST_ID` bigint,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST` PRIMARY KEY (`STRING_LIST_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\"
+FROM
+  \"SKEWED_STRING_LIST\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SKEWED_STRING_LIST
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
+  `STRING_LIST_ID` bigint,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST` PRIMARY KEY (`STRING_LIST_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\"
+FROM
+  \"SKEWED_STRING_LIST\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SKEWED_STRING_LIST
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
+  `STRING_LIST_ID` bigint,
+  `STRING_LIST_VALUE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST_VALUES` PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\",
+  \"STRING_LIST_VALUE\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_STRING_LIST_VALUES\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SKEWED_STRING_LIST_VALUES
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
+  `STRING_LIST_ID` bigint,
+  `STRING_LIST_VALUE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST_VALUES` PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\",
+  \"STRING_LIST_VALUE\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_STRING_LIST_VALUES\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SKEWED_STRING_LIST_VALUES
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_VALUES` (
+  `SD_ID_OID` bigint,
+  `STRING_LIST_ID_EID` bigint,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_VALUES` PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID_OID\",
+  \"STRING_LIST_ID_EID\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_VALUES\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SKEWED_VALUES
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SKEWED_VALUES` (
+  `SD_ID_OID` bigint,
+  `STRING_LIST_ID_EID` bigint,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_VALUES` PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID_OID\",
+  \"STRING_LIST_ID_EID\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_VALUES\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SKEWED_VALUES
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `SORT_COLS` (
+  `SD_ID` bigint,
+  `COLUMN_NAME` string,
+  `ORDER` int,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SORT_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"COLUMN_NAME\",
+  \"ORDER\",
+  \"INTEGER_IDX\"
+FROM
+  \"SORT_COLS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@SORT_COLS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `SORT_COLS` (
+  `SD_ID` bigint,
+  `COLUMN_NAME` string,
+  `ORDER` int,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SORT_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"COLUMN_NAME\",
+  \"ORDER\",
+  \"INTEGER_IDX\"
+FROM
+  \"SORT_COLS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@SORT_COLS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `TABLE_PARAMS` (
+  `TBL_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_TABLE_PARAMS` PRIMARY KEY (`TBL_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"TABLE_PARAMS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@TABLE_PARAMS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `TABLE_PARAMS` (
+  `TBL_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_TABLE_PARAMS` PRIMARY KEY (`TBL_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"TABLE_PARAMS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@TABLE_PARAMS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `TBLS` (
+  `TBL_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `OWNER` string,
+  `RETENTION` int,
+  `SD_ID` bigint,
+  `TBL_NAME` string,
+  `TBL_TYPE` string,
+  `VIEW_EXPANDED_TEXT` string,
+  `VIEW_ORIGINAL_TEXT` string,
+  `IS_REWRITE_ENABLED` boolean,
+  CONSTRAINT `SYS_PK_TBLS` PRIMARY KEY (`TBL_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"OWNER\",
+  \"RETENTION\",
+  \"SD_ID\",
+  \"TBL_NAME\",
+  \"TBL_TYPE\",
+  \"VIEW_EXPANDED_TEXT\",
+  \"VIEW_ORIGINAL_TEXT\",
+  \"IS_REWRITE_ENABLED\"
+FROM TBLS"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@TBLS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `TBLS` (
+  `TBL_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `OWNER` string,
+  `RETENTION` int,
+  `SD_ID` bigint,
+  `TBL_NAME` string,
+  `TBL_TYPE` string,
+  `VIEW_EXPANDED_TEXT` string,
+  `VIEW_ORIGINAL_TEXT` string,
+  `IS_REWRITE_ENABLED` boolean,
+  CONSTRAINT `SYS_PK_TBLS` PRIMARY KEY (`TBL_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"OWNER\",
+  \"RETENTION\",
+  \"SD_ID\",
+  \"TBL_NAME\",
+  \"TBL_TYPE\",
+  \"VIEW_EXPANDED_TEXT\",
+  \"VIEW_ORIGINAL_TEXT\",
+  \"IS_REWRITE_ENABLED\"
+FROM TBLS"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@TBLS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
+  `TBL_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_COL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_COL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_COL_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@TBL_COL_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
+  `TBL_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_COL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_COL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_COL_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@TBL_COL_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
+  `TBL_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_PRIVS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@TBL_PRIVS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
+  `TBL_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_PRIVS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@TBL_PRIVS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `TAB_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `TBL_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_TAB_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"TBL_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"TAB_COL_STATS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@TAB_COL_STATS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `TAB_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `TBL_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_TAB_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"TBL_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"TAB_COL_STATS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@TAB_COL_STATS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `PART_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `PARTITION_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `PART_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_PART_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"PARTITION_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"PART_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"PART_COL_STATS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@PART_COL_STATS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `PART_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `PARTITION_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `PART_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_PART_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"PARTITION_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"PART_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"PART_COL_STATS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@PART_COL_STATS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@VERSION
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@VERSION
+POSTHOOK: Output: database:sys
+PREHOOK: query: INSERT INTO `VERSION` VALUES (1, '3.0.0', 'Hive release version 3.0.0')
+PREHOOK: type: QUERY
+PREHOOK: Output: sys@version
+POSTHOOK: query: INSERT INTO `VERSION` VALUES (1, '3.0.0', 'Hive release version 3.0.0')
+POSTHOOK: type: QUERY
+POSTHOOK: Output: sys@version
+POSTHOOK: Lineage: version.schema_version SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: version.ver_id EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: version.version_comment SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `DB_VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_DB_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"VER_ID\",
+  \"SCHEMA_VERSION\",
+  \"VERSION_COMMENT\"
+FROM
+  \"VERSION\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@DB_VERSION
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `DB_VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_DB_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"VER_ID\",
+  \"SCHEMA_VERSION\",
+  \"VERSION_COMMENT\"
+FROM
+  \"VERSION\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@DB_VERSION
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `FUNCS` (
+  `FUNC_ID` bigint,
+  `CLASS_NAME` string,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `FUNC_NAME` string,
+  `FUNC_TYPE` int,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_FUNCS` PRIMARY KEY (`FUNC_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"FUNC_ID\",
+  \"CLASS_NAME\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"FUNC_NAME\",
+  \"FUNC_TYPE\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  \"FUNCS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@FUNCS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `FUNCS` (
+  `FUNC_ID` bigint,
+  `CLASS_NAME` string,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `FUNC_NAME` string,
+  `FUNC_TYPE` int,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_FUNCS` PRIMARY KEY (`FUNC_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"FUNC_ID\",
+  \"CLASS_NAME\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"FUNC_NAME\",
+  \"FUNC_TYPE\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  \"FUNCS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@FUNCS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
+(
+  `CHILD_CD_ID` bigint,
+  `CHILD_INTEGER_IDX` int,
+  `CHILD_TBL_ID` bigint,
+  `PARENT_CD_ID` bigint,
+  `PARENT_INTEGER_IDX` int,
+  `PARENT_TBL_ID` bigint,
+  `POSITION` bigint,
+  `CONSTRAINT_NAME` string,
+  `CONSTRAINT_TYPE` string,
+  `UPDATE_RULE` string,
+  `DELETE_RULE` string,
+  `ENABLE_VALIDATE_RELY` int,
+  CONSTRAINT `SYS_PK_KEY_CONSTRAINTS` PRIMARY KEY (`CONSTRAINT_NAME`, `POSITION`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CHILD_CD_ID\",
+  \"CHILD_INTEGER_IDX\",
+  \"CHILD_TBL_ID\",
+  \"PARENT_CD_ID\",
+  \"PARENT_INTEGER_IDX\",
+  \"PARENT_TBL_ID\",
+  \"POSITION\",
+  \"CONSTRAINT_NAME\",
+  \"CONSTRAINT_TYPE\",
+  \"UPDATE_RULE\",
+  \"DELETE_RULE\",
+  \"ENABLE_VALIDATE_RELY\"
+FROM
+  \"KEY_CONSTRAINTS\""
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@KEY_CONSTRAINTS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
+(
+  `CHILD_CD_ID` bigint,
+  `CHILD_INTEGER_IDX` int,
+  `CHILD_TBL_ID` bigint,
+  `PARENT_CD_ID` bigint,
+  `PARENT_INTEGER_IDX` int,
+  `PARENT_TBL_ID` bigint,
+  `POSITION` bigint,
+  `CONSTRAINT_NAME` string,
+  `CONSTRAINT_TYPE` string,
+  `UPDATE_RULE` string,
+  `DELETE_RULE` string,
+  `ENABLE_VALIDATE_RELY` int,
+  CONSTRAINT `SYS_PK_KEY_CONSTRAINTS` PRIMARY KEY (`CONSTRAINT_NAME`, `POSITION`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CHILD_CD_ID\",
+  \"CHILD_INTEGER_IDX\",
+  \"CHILD_TBL_ID\",
+  \"PARENT_CD_ID\",
+  \"PARENT_INTEGER_IDX\",
+  \"PARENT_TBL_ID\",
+  \"POSITION\",
+  \"CONSTRAINT_NAME\",
+  \"CONSTRAINT_TYPE\",
+  \"UPDATE_RULE\",
+  \"DELETE_RULE\",
+  \"ENABLE_VALIDATE_RELY\"
+FROM
+  \"KEY_CONSTRAINTS\""
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@KEY_CONSTRAINTS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE DATABASE INFORMATION_SCHEMA
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:INFORMATION_SCHEMA
+POSTHOOK: query: CREATE DATABASE INFORMATION_SCHEMA
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:INFORMATION_SCHEMA
+PREHOOK: query: USE INFORMATION_SCHEMA
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:information_schema
+POSTHOOK: query: USE INFORMATION_SCHEMA
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:information_schema
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `SCHEMATA`
+(
+  `CATALOG_NAME`,
+  `SCHEMA_NAME`,
+  `SCHEMA_OWNER`,
+  `DEFAULT_CHARACTER_SET_CATALOG`,
+  `DEFAULT_CHARACTER_SET_SCHEMA`,
+  `DEFAULT_CHARACTER_SET_NAME`,
+  `SQL_PATH`
+) AS
+SELECT
+  'default',
+  `NAME`,
+  `OWNER_NAME`,
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  `DB_LOCATION_URI`
+FROM
+  sys.DBS
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@dbs
+PREHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `SCHEMATA`
+(
+  `CATALOG_NAME`,
+  `SCHEMA_NAME`,
+  `SCHEMA_OWNER`,
+  `DEFAULT_CHARACTER_SET_CATALOG`,
+  `DEFAULT_CHARACTER_SET_SCHEMA`,
+  `DEFAULT_CHARACTER_SET_NAME`,
+  `SQL_PATH`
+) AS
+SELECT
+  'default',
+  `NAME`,
+  `OWNER_NAME`,
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  `DB_LOCATION_URI`
+FROM
+  sys.DBS
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: SCHEMATA.catalog_name SIMPLE []
+POSTHOOK: Lineage: SCHEMATA.default_character_set_catalog EXPRESSION []
+POSTHOOK: Lineage: SCHEMATA.default_character_set_name EXPRESSION []
+POSTHOOK: Lineage: SCHEMATA.default_character_set_schema EXPRESSION []
+POSTHOOK: Lineage: SCHEMATA.schema_name SIMPLE [(dbs)dbs.FieldSchema(name:name, type:string, comment:from deserializer), ]
+#### A masked pattern was here ####
+POSTHOOK: Lineage: SCHEMATA.sql_path SIMPLE [(dbs)dbs.FieldSchema(name:db_location_uri, type:string, comment:from deserializer), ]
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `TABLES`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `TABLE_TYPE`,
+  `SELF_REFERENCING_COLUMN_NAME`,
+  `REFERENCE_GENERATION`,
+  `USER_DEFINED_TYPE_CATALOG`,
+  `USER_DEFINED_TYPE_SCHEMA`,
+  `USER_DEFINED_TYPE_NAME`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TYPED`,
+  `COMMIT_ACTION`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'VIEW', 'BASE_TABLE'),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'NO', 'YES'),
+  'NO',
+  cast(null as string)
+FROM
+  `sys`.`TBLS` T, `sys`.`DBS` D
+WHERE
+  D.`DB_ID` = T.`DB_ID`
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbls
+PREHOOK: Output: INFORMATION_SCHEMA@TABLES
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `TABLES`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `TABLE_TYPE`,
+  `SELF_REFERENCING_COLUMN_NAME`,
+  `REFERENCE_GENERATION`,
+  `USER_DEFINED_TYPE_CATALOG`,
+  `USER_DEFINED_TYPE_SCHEMA`,
+  `USER_DEFINED_TYPE_NAME`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TYPED`,
+  `COMMIT_ACTION`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'VIEW', 'BASE_TABLE'),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'NO', 'YES'),
+  'NO',
+  cast(null as string)
+FROM
+  `sys`.`TBLS` T, `sys`.`DBS` D
+WHERE
+  D.`DB_ID` = T.`DB_ID`
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbls
+POSTHOOK: Output: INFORMATION_SCHEMA@TABLES
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: TABLES.commit_action EXPRESSION []
+POSTHOOK: Lineage: TABLES.is_insertable_into EXPRESSION [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLES.is_typed SIMPLE []
+POSTHOOK: Lineage: TABLES.reference_generation EXPRESSION []
+POSTHOOK: Lineage: TABLES.self_referencing_column_name EXPRESSION []
+POSTHOOK: Lineage: TABLES.table_catalog SIMPLE []
+POSTHOOK: Lineage: TABLES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLES.table_type EXPRESSION [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLES.user_defined_type_catalog EXPRESSION []
+POSTHOOK: Lineage: TABLES.user_defined_type_name EXPRESSION []
+POSTHOOK: Lineage: TABLES.user_defined_type_schema EXPRESSION []
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`,
+  `WITH_HIERARCHY`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  P.`TBL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
+  'NO'
+FROM
+  sys.`TBL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  P.TBL_ID = T.TBL_ID
+  AND T.DB_ID = D.DB_ID
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbl_privs
+PREHOOK: Input: sys@tbls
+PREHOOK: Output: INFORMATION_SCHEMA@TABLE_PRIVILEGES
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`,
+  `WITH_HIERARCHY`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  P.`TBL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
+  'NO'
+FROM
+  sys.`TBL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  P.TBL_ID = T.TBL_ID
+  AND T.DB_ID = D.DB_ID
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbl_privs
+POSTHOOK: Input: sys@tbls
+POSTHOOK: Output: INFORMATION_SCHEMA@TABLE_PRIVILEGES
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: TABLE_PRIVILEGES.grantee SIMPLE [(tbl_privs)p.FieldSchema(name:principal_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.grantor SIMPLE [(tbl_privs)p.FieldSchema(name:grantor, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.is_grantable EXPRESSION [(tbl_privs)p.FieldSchema(name:grant_option, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.privilege_type SIMPLE [(tbl_privs)p.FieldSchema(name:tbl_priv, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.table_catalog SIMPLE []
+POSTHOOK: Lineage: TABLE_PRIVILEGES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: TABLE_PRIVILEGES.with_hierarchy SIMPLE []
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMNS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `ORDINAL_POSITION`,
+  `COLUMN_DEFAULT`,
+  `IS_NULLABLE`,
+  `DATA_TYPE`,
+  `CHARACTER_MAXIMUM_LENGTH`,
+  `CHARACTER_OCTET_LENGTH`,
+  `NUMERIC_PRECISION`,
+  `NUMERIC_PRECISION_RADIX`,
+  `NUMERIC_SCALE`,
+  `DATETIME_PRECISION`,
+  `INTERVAL_TYPE`,
+  `INTERVAL_PRECISION`,
+  `CHARACTER_SET_CATALOG`,
+  `CHARACTER_SET_SCHEMA`,
+  `CHARACTER_SET_NAME`,
+  `COLLATION_CATALOG`,
+  `COLLATION_SCHEMA`,
+  `COLLATION_NAME`,
+  `UDT_CATALOG`,
+  `UDT_SCHEMA`,
+  `UDT_NAME`,
+  `SCOPE_CATALOG`,
+  `SCOPE_SCHEMA`,
+  `SCOPE_NAME`,
+  `MAXIMUM_CARDINALITY`,
+  `DTD_IDENTIFIER`,
+  `IS_SELF_REFERENCING`,
+  `IS_IDENTITY`,
+  `IDENTITY_GENERATION`,
+  `IDENTITY_START`,
+  `IDENTITY_INCREMENT`,
+  `IDENTITY_MAXIMUM`,
+  `IDENTITY_MINIMUM`,
+  `IDENTITY_CYCLE`,
+  `IS_GENERATED`,
+  `GENERATION_EXPRESSION`,
+  `IS_SYSTEM_TIME_PERIOD_START`,
+  `IS_SYSTEM_TIME_PERIOD_END`,
+  `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
+  `IS_UPDATABLE`,
+  `DECLARED_DATA_TYPE`,
+  `DECLARED_NUMERIC_PRECISION`,
+  `DECLARED_NUMERIC_SCALE`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  C.COLUMN_NAME,
+  C.INTEGER_IDX,
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as TYPE_NAME,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
+       WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
+       ELSE null END,
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  C.CD_ID,
+  'NO',
+  'NO',
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  'NEVER',
+  cast (null as string),
+  'NO',
+  'NO',
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as DECLARED_DATA_TYPE,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END
+FROM
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND C.`CD_ID` = S.`CD_ID`
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@columns_v2
+PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@sds
+PREHOOK: Input: sys@tbls
+PREHOOK: Output: INFORMATION_SCHEMA@COLUMNS
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMNS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `ORDINAL_POSITION`,
+  `COLUMN_DEFAULT`,
+  `IS_NULLABLE`,
+  `DATA_TYPE`,
+  `CHARACTER_MAXIMUM_LENGTH`,
+  `CHARACTER_OCTET_LENGTH`,
+  `NUMERIC_PRECISION`,
+  `NUMERIC_PRECISION_RADIX`,
+  `NUMERIC_SCALE`,
+  `DATETIME_PRECISION`,
+  `INTERVAL_TYPE`,
+  `INTERVAL_PRECISION`,
+  `CHARACTER_SET_CATALOG`,
+  `CHARACTER_SET_SCHEMA`,
+  `CHARACTER_SET_NAME`,
+  `COLLATION_CATALOG`,
+  `COLLATION_SCHEMA`,
+  `COLLATION_NAME`,
+  `UDT_CATALOG`,
+  `UDT_SCHEMA`,
+  `UDT_NAME`,
+  `SCOPE_CATALOG`,
+  `SCOPE_SCHEMA`,
+  `SCOPE_NAME`,
+  `MAXIMUM_CARDINALITY`,
+  `DTD_IDENTIFIER`,
+  `IS_SELF_REFERENCING`,
+  `IS_IDENTITY`,
+  `IDENTITY_GENERATION`,
+  `IDENTITY_START`,
+  `IDENTITY_INCREMENT`,
+  `IDENTITY_MAXIMUM`,
+  `IDENTITY_MINIMUM`,
+  `IDENTITY_CYCLE`,
+  `IS_GENERATED`,
+  `GENERATION_EXPRESSION`,
+  `IS_SYSTEM_TIME_PERIOD_START`,
+  `IS_SYSTEM_TIME_PERIOD_END`,
+  `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
+  `IS_UPDATABLE`,
+  `DECLARED_DATA_TYPE`,
+  `DECLARED_NUMERIC_PRECISION`,
+  `DECLARED_NUMERIC_SCALE`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  C.COLUMN_NAME,
+  C.INTEGER_IDX,
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as TYPE_NAME,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
+       WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
+       ELSE null END,
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  C.CD_ID,
+  'NO',
+  'NO',
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  'NEVER',
+  cast (null as string),
+  'NO',
+  'NO',
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as DECLARED_DATA_TYPE,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END
+FROM
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND C.`CD_ID` = S.`CD_ID`
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@columns_v2
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@sds
+POSTHOOK: Input: sys@tbls
+POSTHOOK: Output: INFORMATION_SCHEMA@COLUMNS
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: COLUMNS.character_maximum_length EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.character_octet_length EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.character_set_catalog EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.character_set_name EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.character_set_schema EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.collation_catalog EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.collation_name EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.collation_schema EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.column_default EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.column_name SIMPLE [(columns_v2)c.FieldSchema(name:column_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.data_type SIMPLE [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.datetime_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.declared_data_type SIMPLE [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.declared_numeric_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.declared_numeric_scale EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.dtd_identifier SIMPLE [(columns_v2)c.FieldSchema(name:cd_id, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.generation_expression EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_cycle EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_generation EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_increment EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_maximum EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_minimum EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.identity_start EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.interval_precision EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.interval_type EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.is_generated SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_identity SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_nullable SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_self_referencing SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_system_time_period_end SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_system_time_period_start SIMPLE []
+POSTHOOK: Lineage: COLUMNS.is_updatable SIMPLE []
+POSTHOOK: Lineage: COLUMNS.maximum_cardinality EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.numeric_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.numeric_precision_radix EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.numeric_scale EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.ordinal_position SIMPLE [(columns_v2)c.FieldSchema(name:integer_idx, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.scope_catalog EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.scope_name EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.scope_schema EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.system_time_period_timestamp_generation EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.table_catalog SIMPLE []
+POSTHOOK: Lineage: COLUMNS.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMNS.udt_catalog EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.udt_name EXPRESSION []
+POSTHOOK: Lineage: COLUMNS.udt_schema EXPRESSION []
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  C.`COLUMN_NAME`,
+  P.`TBL_COL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
+FROM
+  sys.`TBL_COL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D,
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND P.`TBL_ID` = T.`TBL_ID`
+  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
+  AND C.`CD_ID` = S.`CD_ID`
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@columns_v2
+PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@sds
+PREHOOK: Input: sys@tbl_col_privs
+PREHOOK: Input: sys@tbls
+PREHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  C.`COLUMN_NAME`,
+  P.`TBL_COL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
+FROM
+  sys.`TBL_COL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D,
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND P.`TBL_ID` = T.`TBL_ID`
+  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
+  AND C.`CD_ID` = S.`CD_ID`
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@columns_v2
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@sds
+POSTHOOK: Input: sys@tbl_col_privs
+POSTHOOK: Input: sys@tbls
+POSTHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.column_name SIMPLE [(columns_v2)c.FieldSchema(name:column_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantee SIMPLE [(tbl_col_privs)p.FieldSchema(name:principal_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantor SIMPLE [(tbl_col_privs)p.FieldSchema(name:grantor, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.is_grantable EXPRESSION [(tbl_col_privs)p.FieldSchema(name:grant_option, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.privilege_type SIMPLE [(tbl_col_privs)p.FieldSchema(name:tbl_col_priv, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_catalog SIMPLE []
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
+PREHOOK: query: CREATE VIEW IF NOT EXISTS `VIEWS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `VIEW_DEFINITION`,
+  `CHECK_OPTION`,
+  `IS_UPDATABLE`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TRIGGER_UPDATABLE`,
+  `IS_TRIGGER_DELETABLE`,
+  `IS_TRIGGER_INSERTABLE_INTO`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  T.VIEW_ORIGINAL_TEXT,
+  CAST(NULL as string),
+  false,
+  false,
+  false,
+  false,
+  false
+FROM
+  `sys`.`DBS` D,
+  `sys`.`TBLS` T
+WHERE
+   D.`DB_ID` = T.`DB_ID` AND
+   length(T.VIEW_ORIGINAL_TEXT) > 0
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbls
+PREHOOK: Output: INFORMATION_SCHEMA@VIEWS
+PREHOOK: Output: database:information_schema
+POSTHOOK: query: CREATE VIEW IF NOT EXISTS `VIEWS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `VIEW_DEFINITION`,
+  `CHECK_OPTION`,
+  `IS_UPDATABLE`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TRIGGER_UPDATABLE`,
+  `IS_TRIGGER_DELETABLE`,
+  `IS_TRIGGER_INSERTABLE_INTO`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  T.VIEW_ORIGINAL_TEXT,
+  CAST(NULL as string),
+  false,
+  false,
+  false,
+  false,
+  false
+FROM
+  `sys`.`DBS` D,
+  `sys`.`TBLS` T
+WHERE
+   D.`DB_ID` = T.`DB_ID` AND
+   length(T.VIEW_ORIGINAL_TEXT) > 0
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbls
+POSTHOOK: Output: INFORMATION_SCHEMA@VIEWS
+POSTHOOK: Output: database:information_schema
+POSTHOOK: Lineage: VIEWS.check_option EXPRESSION []
+POSTHOOK: Lineage: VIEWS.is_insertable_into SIMPLE []
+POSTHOOK: Lineage: VIEWS.is_trigger_deletable SIMPLE []
+POSTHOOK: Lineage: VIEWS.is_trigger_insertable_into SIMPLE []
+POSTHOOK: Lineage: VIEWS.is_trigger_updatable SIMPLE []
+POSTHOOK: Lineage: VIEWS.is_updatable SIMPLE []
+POSTHOOK: Lineage: VIEWS.table_catalog SIMPLE []
+POSTHOOK: Lineage: VIEWS.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: VIEWS.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: VIEWS.view_definition SIMPLE [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
+PREHOOK: query: use sys
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:sys
+POSTHOOK: query: use sys
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:sys
+PREHOOK: query: select bucket_col_name, integer_idx from bucketing_cols order by bucket_col_name, integer_idx limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@bucketing_cols
+#### A masked pattern was here ####
+POSTHOOK: query: select bucket_col_name, integer_idx from bucketing_cols order by bucket_col_name, integer_idx limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@bucketing_cols
+#### A masked pattern was here ####
+key	0
+key	0
+key	0
+value	0
+PREHOOK: query: select count(*) from cds
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@cds
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from cds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@cds
+#### A masked pattern was here ####
+63
+PREHOOK: query: select column_name, type_name, integer_idx from columns_v2 order by column_name, integer_idx limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@columns_v2
+#### A masked pattern was here ####
+POSTHOOK: query: select column_name, type_name, integer_idx from columns_v2 order by column_name,

<TRUNCATED>

[45/50] [abbrv] hive git commit: HIVE-16413: Create table as select does not check ownership of the location (Niklaus Xiao via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-16413: Create table as select does not check ownership of the location (Niklaus Xiao via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/91948ec0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/91948ec0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/91948ec0

Branch: refs/heads/hive-14535
Commit: 91948ec00b1b642b401bfb529f211eb59b8b16ad
Parents: 23e703f
Author: Niklaus Xiao <st...@live.cn>
Authored: Tue May 16 08:36:05 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:36:05 2017 +0200

----------------------------------------------------------------------
 .../security/authorization/plugin/sqlstd/Operation2Privilege.java   | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/91948ec0/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 18b0e1c..9688f8c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -302,6 +302,7 @@ public class Operation2Privilege {
 (SEL_NOGRANT_AR, null));
     op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newPrivRequirementList(
         new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_INS_SEL_DEL_NOGRANT_AR, HivePrivilegeObjectType.DFS_URI),
         new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     // QUERY,LOAD op can contain an insert & overwrite,
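
For context: the added OWNER_INS_SEL_DEL_NOGRANT_AR requirement on DFS_URI objects means that, under SQL standard authorization, a CREATE TABLE AS SELECT that writes to an explicit location must now also satisfy an ownership (or insert/select/delete, no grant) check on that URI, not just the owner check on the target database shown in the surrounding lines. A minimal HiveQL sketch of the statement shape this guards (table name, path, and source table are illustrative, not taken from the patch or its tests):

  CREATE TABLE ctas_target              -- illustrative table name
  LOCATION '/tmp/ctas_target'           -- illustrative URI; the new DFS_URI requirement applies here
  AS SELECT key, value FROM src;        -- src is an illustrative source table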


[06/50] [abbrv] hive git commit: HIVE-16658: TestTimestampTZ.java has missed the ASF header (Saijin Huang via Rui)

Posted by we...@apache.org.
HIVE-16658: TestTimestampTZ.java has missed the ASF header (Saijin Huang via Rui)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f2fa83cd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f2fa83cd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f2fa83cd

Branch: refs/heads/hive-14535
Commit: f2fa83cd1552e85526ff02a6f3473f5a36bfde8f
Parents: 4291c46
Author: Saijin Huang <hu...@zte.com.cn>
Authored: Sat May 13 12:16:45 2017 +0800
Committer: Rui Li <li...@apache.org>
Committed: Sat May 13 12:16:45 2017 +0800

----------------------------------------------------------------------
 .../hadoop/hive/common/type/TestTimestampTZ.java  | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f2fa83cd/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
index 739850a..0cef77a 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.common.type;
 
 import org.junit.Assert;


[25/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
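
For context before the plan diff: with shared scans, Tez reuses a single Map vertex for query branches that read the same table. In the updated intersect_merge.q.out plan below, the separate Map 5 and Map 9 vertices are gone, Map 1 carries a second Select Operator over its TableScan, and the remaining standalone scan becomes Map 8. A rough HiveQL sketch of the kind of query this optimizes (table and column names are illustrative, not the literal test query):

  SELECT key, value FROM b
  INTERSECT DISTINCT
  SELECT key, value FROM a
  INTERSECT DISTINCT
  SELECT key, value FROM b;
  -- with shared scans, both reads of b can be served by one Map vertex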
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/intersect_merge.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/intersect_merge.q.out b/ql/src/test/results/clientpositive/llap/intersect_merge.q.out
index 6217785..a312966 100644
--- a/ql/src/test/results/clientpositive/llap/intersect_merge.q.out
+++ b/ql/src/test/results/clientpositive/llap/intersect_merge.q.out
@@ -1736,11 +1736,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE), Union 7 (CONTAINS)
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
-        Reducer 8 <- Union 3 (CONTAINS), Union 7 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Union 6 (CONTAINS)
+        Reducer 7 <- Union 3 (CONTAINS), Union 6 (SIMPLE_EDGE)
+        Reducer 9 <- Map 8 (SIMPLE_EDGE), Union 6 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1764,75 +1764,47 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                         Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col2 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: int), value (type: int)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(1)
                       keys: _col0 (type: int), _col1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
-                        Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col2 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
-                  alias: b
-                  Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                  alias: a
+                  Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: int), value (type: int)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(1)
                       keys: _col0 (type: int), _col1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
-                        Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col2 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: int), KEY._col1 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count(_col2)
-                  keys: _col0 (type: int), _col1 (type: int)
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 3 Data size: 9 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: int), _col1 (type: int)
-                    sort order: ++
-                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
-                    Statistics: Num rows: 3 Data size: 9 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col2 (type: bigint)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1884,7 +1856,7 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1892,7 +1864,7 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: count(_col2)
                   keys: _col0 (type: int), _col1 (type: int)
@@ -1905,7 +1877,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                     Statistics: Num rows: 3 Data size: 9 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col2 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1939,10 +1911,31 @@ STAGE PLANS:
                           Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                           Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col2 (type: bigint), _col3 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int), KEY._col1 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(_col2)
+                  keys: _col0 (type: int), _col1 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 3 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int), _col1 (type: int)
+                    sort order: ++
+                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
+                    Statistics: Num rows: 3 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col2 (type: bigint)
         Union 3 
             Vertex: Union 3
-        Union 7 
-            Vertex: Union 7
+        Union 6 
+            Vertex: Union 6
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/join46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join46.q.out b/ql/src/test/results/clientpositive/llap/join46.q.out
index 56f6862..f8636e8 100644
--- a/ql/src/test/results/clientpositive/llap/join46.q.out
+++ b/ql/src/test/results/clientpositive/llap/join46.q.out
@@ -2062,9 +2062,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2082,47 +2082,33 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: int)
                       Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col2 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: test2
-                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: int)
                       sort order: +
                       Map-reduce partition columns: _col1 (type: int)
-                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
         Map 5 
             Map Operator Tree:
                 TableScan
-                  alias: test1
-                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
                     outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: int)
                       sort order: +
                       Map-reduce partition columns: _col1 (type: int)
-                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col2 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: test2
-                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: int), value (type: int), col_2 (type: string)
                     outputColumnNames: _col0, _col1, _col2
@@ -2170,7 +2156,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/limit_join_transpose.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/limit_join_transpose.q.out b/ql/src/test/results/clientpositive/llap/limit_join_transpose.q.out
index 61b5c12..4474581 100644
--- a/ql/src/test/results/clientpositive/llap/limit_join_transpose.q.out
+++ b/ql/src/test/results/clientpositive/llap/limit_join_transpose.q.out
@@ -222,7 +222,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Map 4 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -242,13 +242,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -348,8 +341,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Map 5 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 6 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -369,13 +362,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: src3
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -386,13 +372,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: src1
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -518,10 +497,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 7 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -539,13 +518,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -558,13 +530,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src3
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -598,7 +563,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -614,7 +579,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: string)
                     Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -633,7 +598,7 @@ STAGE PLANS:
                     Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.1
                     value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -708,9 +673,9 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Map 7 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -728,9 +693,19 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      TopN Hash Memory Usage: 0.1
+                      value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -746,23 +721,6 @@ STAGE PLANS:
                       Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      TopN Hash Memory Usage: 0.1
-                      value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -797,6 +755,22 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 1
+                  Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col1 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col1 (type: string)
+                    Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: string)
         Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
@@ -834,22 +808,6 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: string)
-        Reducer 8 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Limit
-                  Number of rows: 1
-                  Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col1 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col1 (type: string)
-                    Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: string)
 
   Stage: Stage-0
     Fetch Operator
@@ -1154,7 +1112,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Map 4 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1175,13 +1133,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1282,8 +1233,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Map 5 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 6 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1304,13 +1255,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: src3
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1321,13 +1265,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: src1
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1454,10 +1391,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 7 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1475,13 +1412,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1495,13 +1425,6 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.1
                         value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src3
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1536,7 +1459,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1553,7 +1476,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: string)
                     Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1573,7 +1496,7 @@ STAGE PLANS:
                     Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.1
                     value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1648,9 +1571,9 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Map 7 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1668,9 +1591,19 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      TopN Hash Memory Usage: 0.1
+                      value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -1686,23 +1619,6 @@ STAGE PLANS:
                       Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: src2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      TopN Hash Memory Usage: 0.1
-                      value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1738,6 +1654,23 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 1
+                  Offset of rows: 1
+                  Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col1 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col1 (type: string)
+                    Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: string)
         Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1776,23 +1709,6 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: string)
-        Reducer 8 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Limit
-                  Number of rows: 1
-                  Offset of rows: 1
-                  Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col1 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col1 (type: string)
-                    Statistics: Num rows: 1 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: string)
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/limit_pushdown.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/limit_pushdown.q.out b/ql/src/test/results/clientpositive/llap/limit_pushdown.q.out
index dd54dd2..0a8df61 100644
--- a/ql/src/test/results/clientpositive/llap/limit_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/llap/limit_pushdown.q.out
@@ -905,9 +905,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -932,13 +932,6 @@ STAGE PLANS:
                         Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                         TopN Hash Memory Usage: 0.3
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1000,7 +993,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1017,7 +1010,7 @@ STAGE PLANS:
                     Statistics: Num rows: 3 Data size: 285 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.3
                     value expressions: _col0 (type: string), _col1 (type: bigint)
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
index 7d01c69..1e39acc 100644
--- a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
@@ -157,8 +157,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -178,13 +178,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
-            Execution mode: vectorized, llap
-            LLAP IO: no inputs
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: src_orc
-                  Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -197,7 +190,7 @@ STAGE PLANS:
                         Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
-            LLAP IO: no inputs
+            LLAP IO: all inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -216,7 +209,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
+        Reducer 3 
             Execution mode: vectorized, llap
             Reduce Operator Tree:
               Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mapjoin46.q.out b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
index 73960ce..71f235a 100644
--- a/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
@@ -1827,7 +1827,7 @@ NULL	NULL	None	NULL	NULL	NULL
 101	2	Car	102	2	Del
 101	2	Car	103	2	Ema
 NULL	NULL	NULL	104	3	Fli
-Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM (
@@ -1877,9 +1877,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Map 2 <- Map 1 (BROADCAST_EDGE)
-        Map 4 <- Map 5 (BROADCAST_EDGE)
-        Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE), Map 4 (CUSTOM_SIMPLE_EDGE)
+        Map 1 <- Map 4 (BROADCAST_EDGE)
+        Map 3 <- Map 1 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1897,26 +1897,19 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: int)
                       Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: int), _col2 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 2 
-            Map Operator Tree:
-                TableScan
-                  alias: test2
-                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
-                           Right Outer Join0 to 1
+                           Left Outer Join0 to 1
                       keys:
                         0 _col1 (type: int)
                         1 _col1 (type: int)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
-                        0 Map 1
+                        1 Map 4
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
                       Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -1925,24 +1918,24 @@ STAGE PLANS:
                         value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 4 
+        Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: test1
-                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
                     outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
-                           Left Outer Join0 to 1
+                           Right Outer Join0 to 1
                       keys:
                         0 _col1 (type: int)
                         1 _col1 (type: int)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
-                        1 Map 5
+                        0 Map 1
                       residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
                       Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -1951,7 +1944,7 @@ STAGE PLANS:
                         value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 5 
+        Map 4 
             Map Operator Tree:
                 TableScan
                   alias: test2
@@ -1968,7 +1961,7 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 3 
+        Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -1994,7 +1987,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: SELECT *
 FROM (
   SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/mrr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mrr.q.out b/ql/src/test/results/clientpositive/llap/mrr.q.out
index 1437d5d..726349c 100644
--- a/ql/src/test/results/clientpositive/llap/mrr.q.out
+++ b/ql/src/test/results/clientpositive/llap/mrr.q.out
@@ -1292,14 +1292,14 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
-        Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (SIMPLE_EDGE)
+        Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+        Reducer 8 <- Map 1 (SIMPLE_EDGE)
+        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1322,13 +1322,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1344,13 +1337,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1368,33 +1354,6 @@ STAGE PLANS:
                         value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col1 (type: bigint)
-                  sort order: +
-                  Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string)
-        Reducer 11 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: bigint)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col1 (type: bigint)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1458,7 +1417,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1475,7 +1434,7 @@ STAGE PLANS:
                     sort order: +
                     Statistics: Num rows: 68 Data size: 6460 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1488,6 +1447,33 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 68 Data size: 6460 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: bigint)
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col1 (type: bigint)
+                  sort order: +
+                  Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: bigint)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col1 (type: bigint)
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/multiMapJoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/multiMapJoin2.q.out b/ql/src/test/results/clientpositive/llap/multiMapJoin2.q.out
index e9c016e..b4b0e93 100644
--- a/ql/src/test/results/clientpositive/llap/multiMapJoin2.q.out
+++ b/ql/src/test/results/clientpositive/llap/multiMapJoin2.q.out
@@ -722,12 +722,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Map 1 <- Map 5 (BROADCAST_EDGE)
-        Map 6 <- Map 8 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Map 1 <- Map 6 (BROADCAST_EDGE), Map 7 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -750,7 +749,7 @@ STAGE PLANS:
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 5
+                          1 Map 6
                         Statistics: Num rows: 60 Data size: 5220 Basic stats: COMPLETE Column stats: COMPLETE
                         Group By Operator
                           keys: _col0 (type: string)
@@ -762,32 +761,6 @@ STAGE PLANS:
                             sort order: +
                             Map-reduce partition columns: _col0 (type: string)
                             Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: y2
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: x1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
@@ -803,7 +776,7 @@ STAGE PLANS:
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 8
+                          1 Map 7
                         Statistics: Num rows: 60 Data size: 5220 Basic stats: COMPLETE Column stats: COMPLETE
                         Group By Operator
                           keys: _col0 (type: string)
@@ -817,7 +790,26 @@ STAGE PLANS:
                             Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 8 
+        Map 6 
+            Map Operator Tree:
+                TableScan
+                  alias: y2
+                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: y1
@@ -852,7 +844,7 @@ STAGE PLANS:
                     1 _col0 (type: string)
                   outputColumnNames: _col0
                   input vertices:
-                    1 Reducer 7
+                    1 Reducer 5
                   Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: count()
@@ -893,7 +885,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -990,12 +982,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Map 1 <- Map 5 (BROADCAST_EDGE)
-        Map 6 <- Map 8 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Map 1 <- Map 6 (BROADCAST_EDGE), Map 7 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1018,7 +1009,7 @@ STAGE PLANS:
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 5
+                          1 Map 6
                         Statistics: Num rows: 60 Data size: 5220 Basic stats: COMPLETE Column stats: COMPLETE
                         Group By Operator
                           keys: _col0 (type: string)
@@ -1030,32 +1021,6 @@ STAGE PLANS:
                             sort order: +
                             Map-reduce partition columns: _col0 (type: string)
                             Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: y2
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: x1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1071,7 +1036,7 @@ STAGE PLANS:
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 8
+                          1 Map 7
                         Statistics: Num rows: 60 Data size: 5220 Basic stats: COMPLETE Column stats: COMPLETE
                         Group By Operator
                           keys: _col0 (type: string)
@@ -1085,7 +1050,26 @@ STAGE PLANS:
                             Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 8 
+        Map 6 
+            Map Operator Tree:
+                TableScan
+                  alias: y2
+                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: y1
@@ -1120,7 +1104,7 @@ STAGE PLANS:
                     1 _col0 (type: string)
                   outputColumnNames: _col0
                   input vertices:
-                    1 Reducer 7
+                    1 Reducer 5
                   Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: count()
@@ -1161,7 +1145,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1259,10 +1243,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Map 1 <- Map 5 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1316,13 +1300,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: x1
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1354,7 +1331,7 @@ STAGE PLANS:
                     1 _col0 (type: string)
                   outputColumnNames: _col1
                   input vertices:
-                    1 Reducer 7
+                    1 Reducer 6
                   Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: count()
@@ -1395,7 +1372,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1493,10 +1470,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Map 1 <- Map 5 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1550,13 +1527,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: x1
-                  Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1588,7 +1558,7 @@ STAGE PLANS:
                     1 _col0 (type: string)
                   outputColumnNames: _col1
                   input vertices:
-                    1 Reducer 7
+                    1 Reducer 6
                   Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     aggregations: count()
@@ -1629,7 +1599,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1879,8 +1849,8 @@ STAGE PLANS:
       Edges:
         Map 1 <- Map 4 (BROADCAST_EDGE), Union 2 (CONTAINS)
         Map 4 <- Map 5 (BROADCAST_EDGE)
-        Map 6 <- Map 7 (BROADCAST_EDGE), Union 2 (CONTAINS)
-        Map 7 <- Map 8 (BROADCAST_EDGE)
+        Map 5 <- Map 7 (BROADCAST_EDGE)
+        Map 6 <- Map 5 (BROADCAST_EDGE), Union 2 (CONTAINS)
         Reducer 3 <- Union 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -1963,13 +1933,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: c
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1979,24 +1942,30 @@ STAGE PLANS:
                       Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                       Map Join Operator
                         condition map:
-                             Left Semi Join 0 to 1
+                             Inner Join 0 to 1
                         keys:
                           0 _col0 (type: string)
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
                           1 Map 7
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Statistics: Num rows: 1000 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 1219 Data size: 106053 Basic stats: COMPLETE Column stats: COMPLETE
+                        Group By Operator
+                          keys: _col0 (type: string)
+                          mode: hash
+                          outputColumnNames: _col0
+                          Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                          Reduce Output Operator
+                            key expressions: _col0 (type: string)
+                            sort order: +
+                            Map-reduce partition columns: _col0 (type: string)
+                            Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
-                  alias: a
+                  alias: c
                   Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
@@ -2007,27 +1976,21 @@ STAGE PLANS:
                       Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                       Map Join Operator
                         condition map:
-                             Inner Join 0 to 1
+                             Left Semi Join 0 to 1
                         keys:
                           0 _col0 (type: string)
                           1 _col0 (type: string)
                         outputColumnNames: _col0
                         input vertices:
-                          1 Map 8
-                        Statistics: Num rows: 1219 Data size: 106053 Basic stats: COMPLETE Column stats: COMPLETE
-                        Group By Operator
-                          keys: _col0 (type: string)
-                          mode: hash
-                          outputColumnNames: _col0
-                          Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-                          Reduce Output Operator
-                            key expressions: _col0 (type: string)
-                            sort order: +
-                            Map-reduce partition columns: _col0 (type: string)
-                            Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                          1 Map 5
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Statistics: Num rows: 1000 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 8 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: b

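Note on the multiMapJoin2.q.out changes above: they show the effect of the shared-scan work for Tez that this commit introduces. The Map vertex that re-scanned the same table under alias x1 (formerly Map 6) is merged into Map 1, its Filter/Map Join pipeline now hangs off the single scan, and the surviving vertices are renumbered, which is why the edges shrink from "Map 1 <- Map 5 / Map 6 <- Map 8" to "Map 1 <- Map 6 (BROADCAST_EDGE), Map 7 (BROADCAST_EDGE)". A rough HiveQL shape that produces this kind of plan, using the standard src/src1 test tables as an assumption rather than the exact statement in multiMapJoin2.q:

    -- illustrative only: both join branches scan the same large table (x1),
    -- so with shared scans a single TableScan vertex can feed both map joins
    SELECT COUNT(*)
    FROM (SELECT x1.key FROM src x1 JOIN src1 y1 ON (x1.key = y1.key)
          UNION ALL
          SELECT x1.key FROM src x1 JOIN src1 y2 ON (x1.key = y2.key)) t;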
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out b/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
index 83de1fb..77062c7 100644
--- a/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
+++ b/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
@@ -1299,9 +1299,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1319,13 +1319,6 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                       TopN Hash Memory Usage: 2.0E-5
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1382,7 +1375,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -1400,7 +1393,7 @@ STAGE PLANS:
                     Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 2.0E-5
                     value expressions: _col0 (type: string), _col1 (type: bigint)
-        Reducer 6 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator

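The offset_limit_ppd_optimizer.q.out change is the same optimization seen from the other end: the separate Map 4 vertex that re-scanned src is gone, its Select pipeline now continues under the single scan in Map 1, and Reducers 5 and 6 are renumbered to 4 and 5. For anyone regenerating or comparing these golden files locally, the rewrite appears to be gated by a HiveConf property added with this work; the property name below is an assumption and should be verified against HiveConf on this branch:

    -- assumed property name; verify in HiveConf before relying on it
    SET hive.optimize.shared.work=true;
    -- illustrative query only, not the statement from offset_limit_ppd_optimizer.q
    EXPLAIN
    SELECT key, COUNT(*) FROM src GROUP BY key ORDER BY key LIMIT 5,10;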

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query33.q.out b/ql/src/test/results/clientpositive/perf/query33.q.out
index 342bd90..00c021b 100644
--- a/ql/src/test/results/clientpositive/perf/query33.q.out
+++ b/ql/src/test/results/clientpositive/perf/query33.q.out
@@ -147,26 +147,26 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
-Reducer 12 <- Map 14 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
-Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 20 <- Map 19 (SIMPLE_EDGE)
-Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 24 (SIMPLE_EDGE)
-Reducer 23 <- Map 25 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
-Reducer 28 <- Reducer 27 (SIMPLE_EDGE), Reducer 34 (SIMPLE_EDGE)
-Reducer 29 <- Reducer 28 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 31 <- Map 30 (SIMPLE_EDGE)
-Reducer 33 <- Map 32 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
-Reducer 34 <- Map 36 (SIMPLE_EDGE), Reducer 33 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 11 <- Map 1 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 15 <- Map 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 14 (SIMPLE_EDGE)
+Reducer 17 <- Map 14 (SIMPLE_EDGE)
+Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 21 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 20 <- Map 26 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE)
+Reducer 23 <- Map 26 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
+Reducer 24 <- Map 21 (SIMPLE_EDGE), Map 28 (SIMPLE_EDGE)
+Reducer 25 <- Map 26 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
-Reducer 9 <- Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 23 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -183,50 +183,20 @@ Stage-0
               Group By Operator [GBY_117] (rows=335408073 width=108)
                 Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
               <-Union 5 [SIMPLE_EDGE]
-                <-Reducer 18 [CONTAINS]
+                <-Reducer 10 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_72] (rows=191657247 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 17 [SIMPLE_EDGE]
+                      <-Reducer 9 [SIMPLE_EDGE]
                         SHUFFLE [RS_71]
                           PartitionCols:_col0
                           Group By Operator [GBY_70] (rows=383314495 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_184] (rows=383314495 width=135)
                               Conds:RS_66._col0=RS_67._col4(Inner),Output:["_col1","_col8"]
-                            <-Reducer 16 [SIMPLE_EDGE]
-                              SHUFFLE [RS_66]
-                                PartitionCols:_col0
-                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
-                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 15 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_63]
-                                    PartitionCols:_col1
-                                    Select Operator [SEL_39] (rows=462000 width=1436)
-                                      Output:["_col0","_col1"]
-                                      Filter Operator [FIL_164] (rows=462000 width=1436)
-                                        predicate:(i_manufact_id is not null and i_item_sk is not null)
-                                        TableScan [TS_37] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_manufact_id"]
-                                <-Reducer 20 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_64]
-                                    PartitionCols:_col0
-                                    Group By Operator [GBY_45] (rows=115500 width=1436)
-                                      Output:["_col0"],keys:KEY._col0
-                                    <-Map 19 [SIMPLE_EDGE]
-                                      SHUFFLE [RS_44]
-                                        PartitionCols:_col0
-                                        Group By Operator [GBY_43] (rows=231000 width=1436)
-                                          Output:["_col0"],keys:i_manufact_id
-                                          Select Operator [SEL_42] (rows=231000 width=1436)
-                                            Output:["i_manufact_id"]
-                                            Filter Operator [FIL_165] (rows=231000 width=1436)
-                                              predicate:((i_category) IN ('Books') and i_manufact_id is not null)
-                                              TableScan [TS_40] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_category","i_manufact_id"]
                             <-Reducer 23 [SIMPLE_EDGE]
                               SHUFFLE [RS_67]
                                 PartitionCols:_col4
@@ -234,14 +204,14 @@ Stage-0
                                   Output:["_col4","_col5"]
                                   Merge Join Operator [MERGEJOIN_179] (rows=348467716 width=135)
                                     Conds:RS_59._col1=RS_60._col0(Inner),Output:["_col2","_col3"]
-                                  <-Map 25 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_60]
                                       PartitionCols:_col0
                                       Select Operator [SEL_55] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_168] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_53] (rows=40000000 width=1014)
+                                          TableScan [TS_16] (rows=40000000 width=1014)
                                             default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
                                   <-Reducer 22 [SIMPLE_EDGE]
                                     SHUFFLE [RS_59]
@@ -249,6 +219,15 @@ Stage-0
                                       Merge Join Operator [MERGEJOIN_178] (rows=316788826 width=135)
                                         Conds:RS_56._col0=RS_57._col0(Inner),Output:["_col1","_col2","_col3"]
                                       <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_57]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_52] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_167] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
+                                              TableScan [TS_13] (rows=73049 width=1119)
+                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                      <-Map 27 [SIMPLE_EDGE]
                                         SHUFFLE [RS_56]
                                           PartitionCols:_col0
                                           Select Operator [SEL_49] (rows=287989836 width=135)
@@ -257,49 +236,69 @@ Stage-0
                                               predicate:(cs_sold_date_sk is not null and cs_bill_addr_sk is not null and cs_item_sk is not null)
                                               TableScan [TS_47] (rows=287989836 width=135)
                                                 default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_addr_sk","cs_item_sk","cs_ext_sales_price"]
-                                      <-Map 24 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_57]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_52] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_167] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
-                                              TableScan [TS_50] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                <-Reducer 29 [CONTAINS]
+                            <-Reducer 8 [SIMPLE_EDGE]
+                              SHUFFLE [RS_66]
+                                PartitionCols:_col0
+                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
+                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
+                                <-Map 1 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_63]
+                                    PartitionCols:_col1
+                                    Select Operator [SEL_39] (rows=462000 width=1436)
+                                      Output:["_col0","_col1"]
+                                      Filter Operator [FIL_164] (rows=462000 width=1436)
+                                        predicate:(i_manufact_id is not null and i_item_sk is not null)
+                                        TableScan [TS_0] (rows=462000 width=1436)
+                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_manufact_id"]
+                                <-Reducer 16 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_64]
+                                    PartitionCols:_col0
+                                    Group By Operator [GBY_45] (rows=115500 width=1436)
+                                      Output:["_col0"],keys:KEY._col0
+                                    <-Map 14 [SIMPLE_EDGE]
+                                      SHUFFLE [RS_44]
+                                        PartitionCols:_col0
+                                        Group By Operator [GBY_43] (rows=231000 width=1436)
+                                          Output:["_col0"],keys:i_manufact_id
+                                          Select Operator [SEL_42] (rows=231000 width=1436)
+                                            Output:["i_manufact_id"]
+                                            Filter Operator [FIL_165] (rows=231000 width=1436)
+                                              predicate:((i_category) IN ('Books') and i_manufact_id is not null)
+                                              TableScan [TS_3] (rows=462000 width=1436)
+                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_category","i_manufact_id"]
+                <-Reducer 13 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_111] (rows=95833781 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 28 [SIMPLE_EDGE]
+                      <-Reducer 12 [SIMPLE_EDGE]
                         SHUFFLE [RS_110]
                           PartitionCols:_col0
                           Group By Operator [GBY_109] (rows=191667562 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_185] (rows=191667562 width=135)
                               Conds:RS_105._col0=RS_106._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 27 [SIMPLE_EDGE]
+                            <-Reducer 11 [SIMPLE_EDGE]
                               SHUFFLE [RS_105]
                                 PartitionCols:_col0
                                 Merge Join Operator [MERGEJOIN_180] (rows=508200 width=1436)
                                   Conds:RS_102._col1=RS_103._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 26 [SIMPLE_EDGE]
+                                <-Map 1 [SIMPLE_EDGE]
                                   SHUFFLE [RS_102]
                                     PartitionCols:_col1
                                     Select Operator [SEL_78] (rows=462000 width=1436)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_169] (rows=462000 width=1436)
                                         predicate:(i_manufact_id is not null and i_item_sk is not null)
-                                        TableScan [TS_76] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_manufact_id"]
-                                <-Reducer 31 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 17 [SIMPLE_EDGE]
                                   SHUFFLE [RS_103]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_84] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 30 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_83]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_82] (rows=231000 width=1436)
@@ -308,30 +307,36 @@ Stage-0
                                             Output:["i_manufact_id"]
                                             Filter Operator [FIL_170] (rows=231000 width=1436)
                                               predicate:((i_category) IN ('Books') and i_manufact_id is not null)
-                                              TableScan [TS_79] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_category","i_manufact_id"]
-                            <-Reducer 34 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 25 [SIMPLE_EDGE]
                               SHUFFLE [RS_106]
                                 PartitionCols:_col3
                                 Select Operator [SEL_101] (rows=174243235 width=135)
                                   Output:["_col3","_col5"]
                                   Merge Join Operator [MERGEJOIN_182] (rows=174243235 width=135)
                                     Conds:RS_98._col2=RS_99._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 36 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_99]
                                       PartitionCols:_col0
                                       Select Operator [SEL_94] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_173] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_92] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 33 [SIMPLE_EDGE]
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 24 [SIMPLE_EDGE]
                                     SHUFFLE [RS_98]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_181] (rows=158402938 width=135)
                                         Conds:RS_95._col0=RS_96._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 32 [SIMPLE_EDGE]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_96]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_91] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_172] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 28 [SIMPLE_EDGE]
                                         SHUFFLE [RS_95]
                                           PartitionCols:_col0
                                           Select Operator [SEL_88] (rows=144002668 width=135)
@@ -340,15 +345,6 @@ Stage-0
                                               predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null and ws_item_sk is not null)
                                               TableScan [TS_86] (rows=144002668 width=135)
                                                 default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                      <-Map 35 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_96]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_91] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_172] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
-                                              TableScan [TS_89] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                 <-Reducer 4 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
@@ -363,45 +359,6 @@ Stage-0
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_183] (rows=766650239 width=88)
                               Conds:RS_29._col0=RS_30._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 12 [SIMPLE_EDGE]
-                              SHUFFLE [RS_30]
-                                PartitionCols:_col3
-                                Select Operator [SEL_25] (rows=696954748 width=88)
-                                  Output:["_col3","_col5"]
-                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
-                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 14 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_23]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_18] (rows=20000000 width=1014)
-                                        Output:["_col0"]
-                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
-                                          predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
-                                          TableScan [TS_16] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 11 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_22]
-                                      PartitionCols:_col2
-                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
-                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 10 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_19]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_12] (rows=575995635 width=88)
-                                            Output:["_col0","_col1","_col2","_col3"]
-                                            Filter Operator [FIL_161] (rows=575995635 width=88)
-                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
-                                              TableScan [TS_10] (rows=575995635 width=88)
-                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
-                                      <-Map 13 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_20]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_15] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_162] (rows=18262 width=1119)
-                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
-                                              TableScan [TS_13] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                             <-Reducer 2 [SIMPLE_EDGE]
                               SHUFFLE [RS_29]
                                 PartitionCols:_col0
@@ -414,14 +371,13 @@ Stage-0
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_159] (rows=462000 width=1436)
                                         predicate:(i_manufact_id is not null and i_item_sk is not null)
-                                        TableScan [TS_0] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_manufact_id"]
-                                <-Reducer 9 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 15 [SIMPLE_EDGE]
                                   SHUFFLE [RS_27]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_8] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 8 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_7]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_6] (rows=231000 width=1436)
@@ -430,6 +386,42 @@ Stage-0
                                             Output:["i_manufact_id"]
                                             Filter Operator [FIL_160] (rows=231000 width=1436)
                                               predicate:((i_category) IN ('Books') and i_manufact_id is not null)
-                                              TableScan [TS_3] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_category","i_manufact_id"]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 20 [SIMPLE_EDGE]
+                              SHUFFLE [RS_30]
+                                PartitionCols:_col3
+                                Select Operator [SEL_25] (rows=696954748 width=88)
+                                  Output:["_col3","_col5"]
+                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
+                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
+                                  <-Map 26 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_23]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_18] (rows=20000000 width=1014)
+                                        Output:["_col0"]
+                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
+                                          predicate:((ca_gmt_offset = -6) and ca_address_sk is not null)
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 19 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_22]
+                                      PartitionCols:_col2
+                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
+                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_20]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_15] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_162] (rows=18262 width=1119)
+                                              predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 18 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_19]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_12] (rows=575995635 width=88)
+                                            Output:["_col0","_col1","_col2","_col3"]
+                                            Filter Operator [FIL_161] (rows=575995635 width=88)
+                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
+                                              TableScan [TS_10] (rows=575995635 width=88)
+                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
 

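In query33.q.out the new "Please refer to the previous TableScan [TS_0]" lines (likewise TS_3, TS_13, TS_16) are how the EXPLAIN output marks a reused scan: item is now read by just two vertices (Map 1 and Map 14, one per projection), and date_dim and customer_address by one each (Map 21 and Map 26), with each scan's output shared across the store, catalog and web branches of the union instead of one scan per branch as before. A sketch of one branch of that query, reconstructed from the predicates visible in the plan rather than copied from query33.q:

    -- illustrative reconstruction; table and column names follow the TPC-DS schema shown in the plan
    SELECT i_manufact_id, SUM(ss_ext_sales_price) AS total_sales
    FROM store_sales
    JOIN date_dim         ON ss_sold_date_sk = d_date_sk
    JOIN customer_address ON ss_addr_sk = ca_address_sk
    JOIN item             ON ss_item_sk = i_item_sk
    WHERE d_year = 1999 AND d_moy = 3 AND ca_gmt_offset = -6
      AND i_manufact_id IN (SELECT i_manufact_id FROM item WHERE i_category IN ('Books'))
    GROUP BY i_manufact_id;

The union of three such branches over store_sales, catalog_sales and web_sales is what previously forced three independent scans of each dimension table.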
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query38.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query38.q.out b/ql/src/test/results/clientpositive/perf/query38.q.out
index 133363f..ae9ada5 100644
--- a/ql/src/test/results/clientpositive/perf/query38.q.out
+++ b/ql/src/test/results/clientpositive/perf/query38.q.out
@@ -45,17 +45,17 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
-Reducer 12 <- Map 15 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 17 <- Map 16 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
-Reducer 18 <- Map 21 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Reducer 18 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 10 <- Map 15 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 12 <- Map 17 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 13 <- Map 15 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 14 <- Reducer 13 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
-Reducer 3 <- Map 9 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Map 15 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (CUSTOM_SIMPLE_EDGE)
+Reducer 9 <- Map 16 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -79,7 +79,7 @@ Stage-0
                       Group By Operator [GBY_83] (rows=152458212 width=108)
                         Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
                       <-Union 5 [SIMPLE_EDGE]
-                        <-Reducer 13 [CONTAINS]
+                        <-Reducer 11 [CONTAINS]
                           Reduce Output Operator [RS_82]
                             PartitionCols:_col0, _col1, _col2
                             Group By Operator [GBY_81] (rows=304916424 width=108)
@@ -88,7 +88,7 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
                                 Group By Operator [GBY_44] (rows=174233858 width=135)
                                   Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2
-                                <-Reducer 12 [SIMPLE_EDGE]
+                                <-Reducer 10 [SIMPLE_EDGE]
                                   SHUFFLE [RS_43]
                                     PartitionCols:_col0, _col1, _col2
                                     Group By Operator [GBY_42] (rows=348467716 width=135)
@@ -102,14 +102,23 @@ Stage-0
                                             Output:["_col0","_col1","_col2"]
                                             Filter Operator [FIL_113] (rows=80000000 width=860)
                                               predicate:c_customer_sk is not null
-                                              TableScan [TS_32] (rows=80000000 width=860)
+                                              TableScan [TS_6] (rows=80000000 width=860)
                                                 default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
-                                      <-Reducer 11 [SIMPLE_EDGE]
+                                      <-Reducer 9 [SIMPLE_EDGE]
                                         SHUFFLE [RS_38]
                                           PartitionCols:_col1
                                           Merge Join Operator [MERGEJOIN_120] (rows=316788826 width=135)
                                             Conds:RS_35._col0=RS_36._col0(Inner),Output:["_col1","_col3"]
-                                          <-Map 10 [SIMPLE_EDGE]
+                                          <-Map 8 [SIMPLE_EDGE]
+                                            SHUFFLE [RS_36]
+                                              PartitionCols:_col0
+                                              Select Operator [SEL_31] (rows=8116 width=1119)
+                                                Output:["_col0","_col1"]
+                                                Filter Operator [FIL_112] (rows=8116 width=1119)
+                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                  TableScan [TS_3] (rows=73049 width=1119)
+                                                    default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
+                                          <-Map 16 [SIMPLE_EDGE]
                                             SHUFFLE [RS_35]
                                               PartitionCols:_col0
                                               Select Operator [SEL_28] (rows=287989836 width=135)
@@ -118,16 +127,7 @@ Stage-0
                                                   predicate:(cs_sold_date_sk is not null and cs_bill_customer_sk is not null)
                                                   TableScan [TS_26] (rows=287989836 width=135)
                                                     default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_customer_sk"]
-                                          <-Map 14 [SIMPLE_EDGE]
-                                            SHUFFLE [RS_36]
-                                              PartitionCols:_col0
-                                              Select Operator [SEL_31] (rows=8116 width=1119)
-                                                Output:["_col0","_col1"]
-                                                Filter Operator [FIL_112] (rows=8116 width=1119)
-                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                  TableScan [TS_29] (rows=73049 width=1119)
-                                                    default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
-                        <-Reducer 19 [CONTAINS]
+                        <-Reducer 14 [CONTAINS]
                           Reduce Output Operator [RS_82]
                             PartitionCols:_col0, _col1, _col2
                             Group By Operator [GBY_81] (rows=304916424 width=108)
@@ -136,28 +136,35 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
                                 Group By Operator [GBY_71] (rows=87121617 width=135)
                                   Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2
-                                <-Reducer 18 [SIMPLE_EDGE]
+                                <-Reducer 13 [SIMPLE_EDGE]
                                   SHUFFLE [RS_70]
                                     PartitionCols:_col0, _col1, _col2
                                     Group By Operator [GBY_69] (rows=174243235 width=135)
                                       Output:["_col0","_col1","_col2"],keys:_col7, _col6, _col3
                                       Merge Join Operator [MERGEJOIN_123] (rows=174243235 width=135)
                                         Conds:RS_65._col1=RS_66._col0(Inner),Output:["_col3","_col6","_col7"]
-                                      <-Map 21 [SIMPLE_EDGE]
+                                      <-Map 15 [SIMPLE_EDGE]
                                         SHUFFLE [RS_66]
                                           PartitionCols:_col0
                                           Select Operator [SEL_61] (rows=80000000 width=860)
                                             Output:["_col0","_col1","_col2"]
                                             Filter Operator [FIL_116] (rows=80000000 width=860)
                                               predicate:c_customer_sk is not null
-                                              TableScan [TS_59] (rows=80000000 width=860)
-                                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
-                                      <-Reducer 17 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_6]
+                                      <-Reducer 12 [SIMPLE_EDGE]
                                         SHUFFLE [RS_65]
                                           PartitionCols:_col1
                                           Merge Join Operator [MERGEJOIN_122] (rows=158402938 width=135)
                                             Conds:RS_62._col0=RS_63._col0(Inner),Output:["_col1","_col3"]
-                                          <-Map 16 [SIMPLE_EDGE]
+                                          <-Map 8 [SIMPLE_EDGE]
+                                            SHUFFLE [RS_63]
+                                              PartitionCols:_col0
+                                              Select Operator [SEL_58] (rows=8116 width=1119)
+                                                Output:["_col0","_col1"]
+                                                Filter Operator [FIL_115] (rows=8116 width=1119)
+                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                   Please refer to the previous TableScan [TS_3]
+                                          <-Map 17 [SIMPLE_EDGE]
                                             SHUFFLE [RS_62]
                                               PartitionCols:_col0
                                               Select Operator [SEL_55] (rows=144002668 width=135)
@@ -166,15 +173,6 @@ Stage-0
                                                   predicate:(ws_sold_date_sk is not null and ws_bill_customer_sk is not null)
                                                   TableScan [TS_53] (rows=144002668 width=135)
                                                     default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_customer_sk"]
-                                          <-Map 20 [SIMPLE_EDGE]
-                                            SHUFFLE [RS_63]
-                                              PartitionCols:_col0
-                                              Select Operator [SEL_58] (rows=8116 width=1119)
-                                                Output:["_col0","_col1"]
-                                                Filter Operator [FIL_115] (rows=8116 width=1119)
-                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                  TableScan [TS_56] (rows=73049 width=1119)
-                                                    default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
                         <-Reducer 4 [CONTAINS]
                           Reduce Output Operator [RS_82]
                             PartitionCols:_col0, _col1, _col2
@@ -191,20 +189,27 @@ Stage-0
                                       Output:["_col0","_col1","_col2"],keys:_col7, _col6, _col3
                                       Merge Join Operator [MERGEJOIN_119] (rows=696954748 width=88)
                                         Conds:RS_12._col1=RS_13._col0(Inner),Output:["_col3","_col6","_col7"]
-                                      <-Map 9 [SIMPLE_EDGE]
+                                      <-Map 15 [SIMPLE_EDGE]
                                         SHUFFLE [RS_13]
                                           PartitionCols:_col0
                                           Select Operator [SEL_8] (rows=80000000 width=860)
                                             Output:["_col0","_col1","_col2"]
                                             Filter Operator [FIL_110] (rows=80000000 width=860)
                                               predicate:c_customer_sk is not null
-                                              TableScan [TS_6] (rows=80000000 width=860)
-                                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_first_name","c_last_name"]
+                                               Please refer to the previous TableScan [TS_6]
                                       <-Reducer 2 [SIMPLE_EDGE]
                                         SHUFFLE [RS_12]
                                           PartitionCols:_col1
                                           Merge Join Operator [MERGEJOIN_118] (rows=633595212 width=88)
                                             Conds:RS_9._col0=RS_10._col0(Inner),Output:["_col1","_col3"]
+                                          <-Map 8 [SIMPLE_EDGE]
+                                            SHUFFLE [RS_10]
+                                              PartitionCols:_col0
+                                              Select Operator [SEL_5] (rows=8116 width=1119)
+                                                Output:["_col0","_col1"]
+                                                Filter Operator [FIL_109] (rows=8116 width=1119)
+                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                                   Please refer to the previous TableScan [TS_3]
                                           <-Map 1 [SIMPLE_EDGE]
                                             SHUFFLE [RS_9]
                                               PartitionCols:_col0
@@ -214,13 +219,4 @@ Stage-0
                                                   predicate:(ss_sold_date_sk is not null and ss_customer_sk is not null)
                                                   TableScan [TS_0] (rows=575995635 width=88)
                                                     default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
-                                          <-Map 8 [SIMPLE_EDGE]
-                                            SHUFFLE [RS_10]
-                                              PartitionCols:_col0
-                                              Select Operator [SEL_5] (rows=8116 width=1119)
-                                                Output:["_col0","_col1"]
-                                                Filter Operator [FIL_109] (rows=8116 width=1119)
-                                                  predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                  TableScan [TS_3] (rows=73049 width=1119)
-                                                    default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query39.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query39.q.out b/ql/src/test/results/clientpositive/perf/query39.q.out
index 19472c4..dcf3cb2 100644
--- a/ql/src/test/results/clientpositive/perf/query39.q.out
+++ b/ql/src/test/results/clientpositive/perf/query39.q.out
@@ -5,16 +5,16 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 16 (SIMPLE_EDGE)
-Reducer 13 <- Map 17 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
-Reducer 14 <- Map 18 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Reducer 14 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
-Reducer 3 <- Map 9 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 3 <- Map 13 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-Reducer 6 <- Reducer 15 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 6 <- Reducer 11 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 9 <- Map 13 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -30,7 +30,7 @@ Stage-0
               Output:["_col0","_col1","_col3","_col4","_col5","_col6","_col8","_col9"]
               Merge Join Operator [MERGEJOIN_103] (rows=13756683 width=15)
                 Conds:RS_56._col1, _col2=RS_57._col1, _col2(Inner),Output:["_col1","_col2","_col3","_col4","_col6","_col7","_col8","_col9"]
-              <-Reducer 15 [SIMPLE_EDGE]
+              <-Reducer 11 [SIMPLE_EDGE]
                 SHUFFLE [RS_57]
                   PartitionCols:_col1, _col2
                   Select Operator [SEL_55] (rows=12506076 width=15)
@@ -41,58 +41,58 @@ Stage-0
                         Output:["_col1","_col2","_col3","_col4"]
                         Group By Operator [GBY_52] (rows=25012152 width=15)
                           Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["stddev_samp(VALUE._col0)","avg(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2
-                        <-Reducer 14 [SIMPLE_EDGE]
+                        <-Reducer 10 [SIMPLE_EDGE]
                           SHUFFLE [RS_51]
                             PartitionCols:_col0, _col1, _col2
                             Group By Operator [GBY_50] (rows=50024305 width=15)
                               Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["stddev_samp(_col3)","avg(_col3)"],keys:_col8, _col7, _col9
                               Merge Join Operator [MERGEJOIN_102] (rows=50024305 width=15)
                                 Conds:RS_46._col2=RS_47._col0(Inner),Output:["_col3","_col7","_col8","_col9"]
-                              <-Map 18 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_47]
                                   PartitionCols:_col0
                                   Select Operator [SEL_39] (rows=27 width=1029)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_96] (rows=27 width=1029)
                                       predicate:w_warehouse_sk is not null
-                                      TableScan [TS_37] (rows=27 width=1029)
+                                      TableScan [TS_9] (rows=27 width=1029)
                                         default@warehouse,warehouse,Tbl:COMPLETE,Col:NONE,Output:["w_warehouse_sk","w_warehouse_name"]
-                              <-Reducer 13 [SIMPLE_EDGE]
+                              <-Reducer 9 [SIMPLE_EDGE]
                                 SHUFFLE [RS_46]
                                   PartitionCols:_col2
                                   Merge Join Operator [MERGEJOIN_101] (rows=45476640 width=15)
                                     Conds:RS_43._col1=RS_44._col0(Inner),Output:["_col2","_col3","_col7"]
-                                  <-Map 17 [SIMPLE_EDGE]
+                                  <-Map 13 [SIMPLE_EDGE]
                                     SHUFFLE [RS_44]
                                       PartitionCols:_col0
                                       Select Operator [SEL_36] (rows=462000 width=1436)
                                         Output:["_col0"]
                                         Filter Operator [FIL_95] (rows=462000 width=1436)
                                           predicate:i_item_sk is not null
-                                          TableScan [TS_34] (rows=462000 width=1436)
+                                          TableScan [TS_6] (rows=462000 width=1436)
                                             default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk"]
-                                  <-Reducer 12 [SIMPLE_EDGE]
+                                  <-Reducer 8 [SIMPLE_EDGE]
                                     SHUFFLE [RS_43]
                                       PartitionCols:_col1
                                       Merge Join Operator [MERGEJOIN_100] (rows=41342400 width=15)
                                         Conds:RS_40._col0=RS_41._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 11 [SIMPLE_EDGE]
+                                      <-Map 1 [SIMPLE_EDGE]
                                         SHUFFLE [RS_40]
                                           PartitionCols:_col0
                                           Select Operator [SEL_30] (rows=37584000 width=15)
                                             Output:["_col0","_col1","_col2","_col3"]
                                             Filter Operator [FIL_93] (rows=37584000 width=15)
                                               predicate:(inv_item_sk is not null and inv_warehouse_sk is not null and inv_date_sk is not null)
-                                              TableScan [TS_28] (rows=37584000 width=15)
+                                              TableScan [TS_0] (rows=37584000 width=15)
                                                 default@inventory,inventory,Tbl:COMPLETE,Col:NONE,Output:["inv_date_sk","inv_item_sk","inv_warehouse_sk","inv_quantity_on_hand"]
-                                      <-Map 16 [SIMPLE_EDGE]
+                                      <-Map 12 [SIMPLE_EDGE]
                                         SHUFFLE [RS_41]
                                           PartitionCols:_col0
                                           Select Operator [SEL_33] (rows=18262 width=1119)
                                             Output:["_col0"]
                                             Filter Operator [FIL_94] (rows=18262 width=1119)
                                               predicate:((d_year = 1999) and (d_moy = 4) and d_date_sk is not null)
-                                              TableScan [TS_31] (rows=73049 width=1119)
+                                              TableScan [TS_3] (rows=73049 width=1119)
                                                 default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
               <-Reducer 5 [SIMPLE_EDGE]
                 SHUFFLE [RS_56]
@@ -112,29 +112,27 @@ Stage-0
                               Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["stddev_samp(_col3)","avg(_col3)"],keys:_col8, _col7, _col9
                               Merge Join Operator [MERGEJOIN_99] (rows=50024305 width=15)
                                 Conds:RS_18._col2=RS_19._col0(Inner),Output:["_col3","_col7","_col8","_col9"]
-                              <-Map 10 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_19]
                                   PartitionCols:_col0
                                   Select Operator [SEL_11] (rows=27 width=1029)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_92] (rows=27 width=1029)
                                       predicate:w_warehouse_sk is not null
-                                      TableScan [TS_9] (rows=27 width=1029)
-                                        default@warehouse,warehouse,Tbl:COMPLETE,Col:NONE,Output:["w_warehouse_sk","w_warehouse_name"]
+                                       Please refer to the previous TableScan [TS_9]
                               <-Reducer 3 [SIMPLE_EDGE]
                                 SHUFFLE [RS_18]
                                   PartitionCols:_col2
                                   Merge Join Operator [MERGEJOIN_98] (rows=45476640 width=15)
                                     Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col2","_col3","_col7"]
-                                  <-Map 9 [SIMPLE_EDGE]
+                                  <-Map 13 [SIMPLE_EDGE]
                                     SHUFFLE [RS_16]
                                       PartitionCols:_col0
                                       Select Operator [SEL_8] (rows=462000 width=1436)
                                         Output:["_col0"]
                                         Filter Operator [FIL_91] (rows=462000 width=1436)
                                           predicate:i_item_sk is not null
-                                          TableScan [TS_6] (rows=462000 width=1436)
-                                            default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk"]
+                                           Please refer to the previous TableScan [TS_6]
                                   <-Reducer 2 [SIMPLE_EDGE]
                                     SHUFFLE [RS_15]
                                       PartitionCols:_col1
@@ -147,17 +145,15 @@ Stage-0
                                             Output:["_col0","_col1","_col2","_col3"]
                                             Filter Operator [FIL_89] (rows=37584000 width=15)
                                               predicate:(inv_item_sk is not null and inv_warehouse_sk is not null and inv_date_sk is not null)
-                                              TableScan [TS_0] (rows=37584000 width=15)
-                                                default@inventory,inventory,Tbl:COMPLETE,Col:NONE,Output:["inv_date_sk","inv_item_sk","inv_warehouse_sk","inv_quantity_on_hand"]
-                                      <-Map 8 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_0]
+                                      <-Map 12 [SIMPLE_EDGE]
                                         SHUFFLE [RS_13]
                                           PartitionCols:_col0
                                           Select Operator [SEL_5] (rows=18262 width=1119)
                                             Output:["_col0"]
                                             Filter Operator [FIL_90] (rows=18262 width=1119)
                                               predicate:((d_year = 1999) and (d_moy = 3) and d_date_sk is not null)
-                                              TableScan [TS_3] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                               Please refer to the previous TableScan [TS_3]
 
 PREHOOK: query: with inv as (select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy ,stdev,mean, case mean when 0 then null else stdev/mean end cov from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean from inventory ,item ,warehouse ,date_dim where inv_item_sk = i_item_sk and inv_warehouse_sk = w_warehouse_sk and inv_date_sk = d_date_sk and d_year =1999 group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo where case mean when 0 then 0 else stdev/mean end > 1) select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov from inv inv1,inv inv2 where inv1.i_item_sk = inv2.i_item_sk and inv1.w_warehouse_sk = inv2.w_warehouse_sk and inv1.d_moy=3 and inv2.d_moy=3+1 and inv1.cov > 1.5 order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov ,inv2.d_moy,inv2.mean, inv2.cov
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query46.q.out b/ql/src/test/results/clientpositive/perf/query46.q.out
index 556e4b8..8c6e914 100644
--- a/ql/src/test/results/clientpositive/perf/query46.q.out
+++ b/ql/src/test/results/clientpositive/perf/query46.q.out
@@ -5,14 +5,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 15 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+Reducer 10 <- Map 13 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Map 14 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 7 <- Map 12 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
-Reducer 8 <- Map 13 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-Reducer 9 <- Map 14 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 6 <- Map 5 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 9 <- Map 12 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -32,30 +32,52 @@ Stage-0
                   predicate:(_col5 <> _col8)
                   Merge Join Operator [MERGEJOIN_86] (rows=463823414 width=88)
                     Conds:RS_42._col0=RS_43._col1(Inner),Output:["_col2","_col3","_col5","_col6","_col8","_col9","_col10"]
-                  <-Reducer 11 [SIMPLE_EDGE]
+                  <-Reducer 2 [SIMPLE_EDGE]
+                    SHUFFLE [RS_42]
+                      PartitionCols:_col0
+                      Merge Join Operator [MERGEJOIN_81] (rows=88000001 width=860)
+                        Conds:RS_39._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col5"]
+                      <-Map 5 [SIMPLE_EDGE]
+                        SHUFFLE [RS_40]
+                          PartitionCols:_col0
+                          Select Operator [SEL_5] (rows=40000000 width=1014)
+                            Output:["_col0","_col1"]
+                            Filter Operator [FIL_75] (rows=40000000 width=1014)
+                              predicate:ca_address_sk is not null
+                              TableScan [TS_3] (rows=40000000 width=1014)
+                                default@customer_address,current_addr,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
+                      <-Map 1 [SIMPLE_EDGE]
+                        SHUFFLE [RS_39]
+                          PartitionCols:_col1
+                          Select Operator [SEL_2] (rows=80000000 width=860)
+                            Output:["_col0","_col1","_col2","_col3"]
+                            Filter Operator [FIL_74] (rows=80000000 width=860)
+                              predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
+                              TableScan [TS_0] (rows=80000000 width=860)
+                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_addr_sk","c_first_name","c_last_name"]
+                  <-Reducer 7 [SIMPLE_EDGE]
                     SHUFFLE [RS_43]
                       PartitionCols:_col1
                       Select Operator [SEL_37] (rows=421657640 width=88)
                         Output:["_col0","_col1","_col2","_col3","_col4"]
                         Group By Operator [GBY_36] (rows=421657640 width=88)
                           Output:["_col0","_col1","_col2","_col3","_col4","_col5"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3
-                        <-Reducer 10 [SIMPLE_EDGE]
+                        <-Reducer 6 [SIMPLE_EDGE]
                           SHUFFLE [RS_35]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_34] (rows=843315281 width=88)
                               Output:["_col0","_col1","_col2","_col3","_col4","_col5"],aggregations:["sum(_col6)","sum(_col7)"],keys:_col1, _col17, _col3, _col5
                               Merge Join Operator [MERGEJOIN_85] (rows=843315281 width=88)
                                 Conds:RS_30._col3=RS_31._col0(Inner),Output:["_col1","_col3","_col5","_col6","_col7","_col17"]
-                              <-Map 15 [SIMPLE_EDGE]
+                              <-Map 5 [SIMPLE_EDGE]
                                 SHUFFLE [RS_31]
                                   PartitionCols:_col0
                                   Select Operator [SEL_20] (rows=40000000 width=1014)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_80] (rows=40000000 width=1014)
                                       predicate:ca_address_sk is not null
-                                      TableScan [TS_18] (rows=40000000 width=1014)
-                                        default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
-                              <-Reducer 9 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Reducer 11 [SIMPLE_EDGE]
                                 SHUFFLE [RS_30]
                                   PartitionCols:_col3
                                   Merge Join Operator [MERGEJOIN_84] (rows=766650239 width=88)
@@ -69,7 +91,7 @@ Stage-0
                                           predicate:(((hd_dep_count = 4) or (hd_vehicle_count = 2)) and hd_demo_sk is not null)
                                           TableScan [TS_15] (rows=7200 width=107)
                                             default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                                  <-Reducer 8 [SIMPLE_EDGE]
+                                  <-Reducer 10 [SIMPLE_EDGE]
                                     SHUFFLE [RS_27]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_83] (rows=696954748 width=88)
@@ -83,7 +105,7 @@ Stage-0
                                               predicate:((s_city) IN ('Rosedale', 'Bethlehem', 'Clinton', 'Clifton', 'Springfield') and s_store_sk is not null)
                                               TableScan [TS_12] (rows=1704 width=1910)
                                                 default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_city"]
-                                      <-Reducer 7 [SIMPLE_EDGE]
+                                      <-Reducer 9 [SIMPLE_EDGE]
                                         SHUFFLE [RS_24]
                                           PartitionCols:_col4
                                           Merge Join Operator [MERGEJOIN_82] (rows=633595212 width=88)
@@ -97,7 +119,7 @@ Stage-0
                                                   predicate:((d_dow) IN (6, 0) and (d_year) IN (1998, 1999, 2000) and d_date_sk is not null)
                                                   TableScan [TS_9] (rows=73049 width=1119)
                                                     default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_dow"]
-                                          <-Map 6 [SIMPLE_EDGE]
+                                          <-Map 8 [SIMPLE_EDGE]
                                             SHUFFLE [RS_21]
                                               PartitionCols:_col0
                                               Select Operator [SEL_8] (rows=575995635 width=88)
@@ -106,27 +128,4 @@ Stage-0
                                                   predicate:(ss_sold_date_sk is not null and ss_store_sk is not null and ss_hdemo_sk is not null and ss_addr_sk is not null and ss_customer_sk is not null)
                                                   TableScan [TS_6] (rows=575995635 width=88)
                                                     default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk","ss_hdemo_sk","ss_addr_sk","ss_store_sk","ss_ticket_number","ss_coupon_amt","ss_net_profit"]
-                  <-Reducer 2 [SIMPLE_EDGE]
-                    SHUFFLE [RS_42]
-                      PartitionCols:_col0
-                      Merge Join Operator [MERGEJOIN_81] (rows=88000001 width=860)
-                        Conds:RS_39._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col5"]
-                      <-Map 1 [SIMPLE_EDGE]
-                        SHUFFLE [RS_39]
-                          PartitionCols:_col1
-                          Select Operator [SEL_2] (rows=80000000 width=860)
-                            Output:["_col0","_col1","_col2","_col3"]
-                            Filter Operator [FIL_74] (rows=80000000 width=860)
-                              predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
-                              TableScan [TS_0] (rows=80000000 width=860)
-                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_addr_sk","c_first_name","c_last_name"]
-                      <-Map 5 [SIMPLE_EDGE]
-                        SHUFFLE [RS_40]
-                          PartitionCols:_col0
-                          Select Operator [SEL_5] (rows=40000000 width=1014)
-                            Output:["_col0","_col1"]
-                            Filter Operator [FIL_75] (rows=40000000 width=1014)
-                              predicate:ca_address_sk is not null
-                              TableScan [TS_3] (rows=40000000 width=1014)
-                                default@customer_address,current_addr,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query5.q.out b/ql/src/test/results/clientpositive/perf/query5.q.out
index ad78d7e..a3f2d58 100644
--- a/ql/src/test/results/clientpositive/perf/query5.q.out
+++ b/ql/src/test/results/clientpositive/perf/query5.q.out
@@ -256,19 +256,19 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 12 <- Union 13 (CONTAINS)
-Map 17 <- Union 13 (CONTAINS)
-Map 20 <- Union 21 (CONTAINS)
+Map 18 <- Union 19 (CONTAINS)
+Map 20 <- Union 19 (CONTAINS)
+Map 22 <- Union 23 (CONTAINS)
 Map 9 <- Union 2 (CONTAINS)
-Reducer 14 <- Map 18 (SIMPLE_EDGE), Union 13 (SIMPLE_EDGE)
-Reducer 15 <- Map 19 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+Reducer 11 <- Map 10 (SIMPLE_EDGE), Union 19 (SIMPLE_EDGE)
+Reducer 12 <- Map 21 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 14 <- Map 10 (SIMPLE_EDGE), Union 23 (SIMPLE_EDGE)
+Reducer 15 <- Map 27 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
 Reducer 16 <- Reducer 15 (SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 22 <- Map 28 (SIMPLE_EDGE), Union 21 (SIMPLE_EDGE)
-Reducer 23 <- Map 29 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 24 <- Reducer 23 (SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 26 <- Map 25 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE), Union 21 (CONTAINS)
+Reducer 25 <- Map 24 (SIMPLE_EDGE), Map 26 (SIMPLE_EDGE), Union 23 (CONTAINS)
 Reducer 3 <- Map 10 (SIMPLE_EDGE), Union 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 4 <- Map 17 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE)
 Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
@@ -290,7 +290,7 @@ Stage-0
                 Group By Operator [GBY_88] (rows=1136898901 width=110)
                   Output:["_col0","_col1","_col3","_col4","_col5"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)"],keys:KEY._col0, KEY._col1, KEY._col2
                 <-Union 6 [SIMPLE_EDGE]
-                  <-Reducer 16 [CONTAINS]
+                  <-Reducer 13 [CONTAINS]
                     Reduce Output Operator [RS_87]
                       PartitionCols:_col0, _col1, _col2
                       Group By Operator [GBY_86] (rows=2273797803 width=110)
@@ -299,14 +299,14 @@ Stage-0
                           Output:["_col0","_col1","_col2","_col3","_col4"]
                           Group By Operator [GBY_48] (rows=191657181 width=132)
                             Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)"],keys:KEY._col0
-                          <-Reducer 15 [SIMPLE_EDGE]
+                          <-Reducer 12 [SIMPLE_EDGE]
                             SHUFFLE [RS_47]
                               PartitionCols:_col0
                               Group By Operator [GBY_46] (rows=383314363 width=132)
                                 Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(_col2)","sum(_col4)","sum(_col3)","sum(_col5)"],keys:_col9
                                 Merge Join Operator [MERGEJOIN_136] (rows=383314363 width=132)
                                   Conds:RS_42._col0=RS_43._col0(Inner),Output:["_col2","_col3","_col4","_col5","_col9"]
-                                <-Map 19 [SIMPLE_EDGE]
+                                <-Map 21 [SIMPLE_EDGE]
                                   SHUFFLE [RS_43]
                                     PartitionCols:_col0
                                     Select Operator [SEL_38] (rows=46000 width=460)
@@ -315,22 +315,22 @@ Stage-0
                                         predicate:cp_catalog_page_sk is not null
                                         TableScan [TS_36] (rows=46000 width=460)
                                           default@catalog_page,catalog_page,Tbl:COMPLETE,Col:NONE,Output:["cp_catalog_page_sk","cp_catalog_page_id"]
-                                <-Reducer 14 [SIMPLE_EDGE]
+                                <-Reducer 11 [SIMPLE_EDGE]
                                   SHUFFLE [RS_42]
                                     PartitionCols:_col0
                                     Merge Join Operator [MERGEJOIN_135] (rows=348467596 width=132)
-                                      Conds:Union 13._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5"]
-                                    <-Map 18 [SIMPLE_EDGE]
+                                      Conds:Union 19._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5"]
+                                    <-Map 10 [SIMPLE_EDGE]
                                       SHUFFLE [RS_40]
                                         PartitionCols:_col0
                                         Select Operator [SEL_35] (rows=8116 width=1119)
                                           Output:["_col0"]
                                           Filter Operator [FIL_125] (rows=8116 width=1119)
                                             predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-08-18 00:00:00.0 and d_date_sk is not null)
-                                            TableScan [TS_33] (rows=73049 width=1119)
+                                            TableScan [TS_8] (rows=73049 width=1119)
                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                    <-Union 13 [SIMPLE_EDGE]
-                                      <-Map 12 [CONTAINS]
+                                    <-Union 19 [SIMPLE_EDGE]
+                                      <-Map 18 [CONTAINS]
                                         Reduce Output Operator [RS_39]
                                           PartitionCols:_col1
                                           Select Operator [SEL_27] (rows=287989836 width=135)
@@ -339,7 +339,7 @@ Stage-0
                                               predicate:(cs_sold_date_sk is not null and cs_catalog_page_sk is not null)
                                               TableScan [TS_25] (rows=287989836 width=135)
                                                 Output:["cs_sold_date_sk","cs_catalog_page_sk","cs_ext_sales_price","cs_net_profit"]
-                                      <-Map 17 [CONTAINS]
+                                      <-Map 20 [CONTAINS]
                                         Reduce Output Operator [RS_39]
                                           PartitionCols:_col1
                                           Select Operator [SEL_30] (rows=28798881 width=106)
@@ -348,7 +348,7 @@ Stage-0
                                               predicate:(cr_returned_date_sk is not null and cr_catalog_page_sk is not null)
                                               TableScan [TS_28] (rows=28798881 width=106)
                                                 Output:["cr_returned_date_sk","cr_catalog_page_sk","cr_return_amount","cr_net_loss"]
-                  <-Reducer 24 [CONTAINS]
+                  <-Reducer 16 [CONTAINS]
                     Reduce Output Operator [RS_87]
                       PartitionCols:_col0, _col1, _col2
                       Group By Operator [GBY_86] (rows=2273797803 width=110)
@@ -357,14 +357,14 @@ Stage-0
                           Output:["_col0","_col1","_col2","_col3","_col4"]
                           Group By Operator [GBY_82] (rows=182955399 width=135)
                             Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)"],keys:KEY._col0
-                          <-Reducer 23 [SIMPLE_EDGE]
+                          <-Reducer 15 [SIMPLE_EDGE]
                             SHUFFLE [RS_81]
                               PartitionCols:_col0
                               Group By Operator [GBY_80] (rows=365910798 width=135)
                                 Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(_col2)","sum(_col4)","sum(_col3)","sum(_col5)"],keys:_col9
                                 Merge Join Operator [MERGEJOIN_138] (rows=365910798 width=135)
                                   Conds:RS_76._col0=RS_77._col0(Inner),Output:["_col2","_col3","_col4","_col5","_col9"]
-                                <-Map 29 [SIMPLE_EDGE]
+                                <-Map 27 [SIMPLE_EDGE]
                                   SHUFFLE [RS_77]
                                     PartitionCols:_col0
                                     Select Operator [SEL_72] (rows=84 width=1850)
@@ -373,22 +373,21 @@ Stage-0
                                         predicate:web_site_sk is not null
                                         TableScan [TS_70] (rows=84 width=1850)
                                           default@web_site,web_site,Tbl:COMPLETE,Col:NONE,Output:["web_site_sk","web_site_id"]
-                                <-Reducer 22 [SIMPLE_EDGE]
+                                <-Reducer 14 [SIMPLE_EDGE]
                                   SHUFFLE [RS_76]
                                     PartitionCols:_col0
                                     Merge Join Operator [MERGEJOIN_137] (rows=332646173 width=135)
-                                      Conds:Union 21._col1=RS_74._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5"]
-                                    <-Map 28 [SIMPLE_EDGE]
+                                      Conds:Union 23._col1=RS_74._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5"]
+                                    <-Map 10 [SIMPLE_EDGE]
                                       SHUFFLE [RS_74]
                                         PartitionCols:_col0
                                         Select Operator [SEL_69] (rows=8116 width=1119)
                                           Output:["_col0"]
                                           Filter Operator [FIL_130] (rows=8116 width=1119)
                                             predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-08-18 00:00:00.0 and d_date_sk is not null)
-                                            TableScan [TS_67] (rows=73049 width=1119)
-                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                    <-Union 21 [SIMPLE_EDGE]
-                                      <-Map 20 [CONTAINS]
+                                             Please refer to the previous TableScan [TS_8]
+                                    <-Union 23 [SIMPLE_EDGE]
+                                      <-Map 22 [CONTAINS]
                                         Reduce Output Operator [RS_73]
                                           PartitionCols:_col1
                                           Select Operator [SEL_54] (rows=144002668 width=135)
@@ -397,14 +396,14 @@ Stage-0
                                               predicate:(ws_sold_date_sk is not null and ws_web_site_sk is not null)
                                               TableScan [TS_52] (rows=144002668 width=135)
                                                 Output:["ws_sold_date_sk","ws_web_site_sk","ws_ext_sales_price","ws_net_profit"]
-                                      <-Reducer 26 [CONTAINS]
+                                      <-Reducer 25 [CONTAINS]
                                         Reduce Output Operator [RS_73]
                                           PartitionCols:_col1
                                           Select Operator [SEL_64] (rows=158402938 width=135)
                                             Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                             Merge Join Operator [MERGEJOIN_132] (rows=158402938 width=135)
                                               Conds:RS_61._col0, _col2=RS_62._col1, _col2(Inner),Output:["_col1","_col3","_col6","_col7"]
-                                            <-Map 25 [SIMPLE_EDGE]
+                                            <-Map 24 [SIMPLE_EDGE]
                                               SHUFFLE [RS_61]
                                                 PartitionCols:_col0, _col2
                                                 Select Operator [SEL_57] (rows=144002668 width=135)
@@ -413,7 +412,7 @@ Stage-0
                                                     predicate:(ws_web_site_sk is not null and ws_order_number is not null and ws_item_sk is not null)
                                                     TableScan [TS_55] (rows=144002668 width=135)
                                                       default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_item_sk","ws_web_site_sk","ws_order_number"]
-                                            <-Map 27 [SIMPLE_EDGE]
+                                            <-Map 26 [SIMPLE_EDGE]
                                               SHUFFLE [RS_62]
                                                 PartitionCols:_col1, _col2
                                                 Select Operator [SEL_60] (rows=14398467 width=92)
@@ -438,7 +437,7 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(_col2)","sum(_col4)","sum(_col3)","sum(_col5)"],keys:_col9
                                 Merge Join Operator [MERGEJOIN_134] (rows=766640042 width=87)
                                   Conds:RS_17._col0=RS_18._col0(Inner),Output:["_col2","_col3","_col4","_col5","_col9"]
-                                <-Map 11 [SIMPLE_EDGE]
+                                <-Map 17 [SIMPLE_EDGE]
                                   SHUFFLE [RS_18]
                                     PartitionCols:_col0
                                     Select Operator [SEL_13] (rows=1704 width=1910)
@@ -459,8 +458,7 @@ Stage-0
                                           Output:["_col0"]
                                           Filter Operator [FIL_121] (rows=8116 width=1119)
                                             predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-08-18 00:00:00.0 and d_date_sk is not null)
-                                            TableScan [TS_8] (rows=73049 width=1119)
-                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
+                                             Please refer to the previous TableScan [TS_8]
                                     <-Union 2 [SIMPLE_EDGE]
                                       <-Map 1 [CONTAINS]
                                         Reduce Output Operator [RS_14]


[16/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
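
The plan diffs in this message illustrate the shared-scan rewrite: where a plan previously
contained several TableScan operators over the same table (date_dim, customer, warehouse, ...),
the updated output keeps a single scan and later branches point back to it with
"Please refer to the previous TableScan [TS_x]". As a minimal, illustrative HiveQL sketch only
(not taken from this commit), a query whose branches each join date_dim has the kind of shape
that benefits; the hive.optimize.shared.work property name is assumed from HiveConf and should
be verified for the release in use.

  -- Hypothetical session sketch: both UNION ALL branches scan date_dim, so the Tez DAG
  -- can reuse one TableScan for them when shared scans are enabled.
  -- set hive.optimize.shared.work=true;   -- assumed property name, verify in HiveConf
  explain
  select d_year, count(*) as cnt from store_sales, date_dim
   where ss_sold_date_sk = d_date_sk group by d_year
  union all
  select d_year, count(*) as cnt from web_sales, date_dim
   where ws_sold_date_sk = d_date_sk group by d_year;
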
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query25.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query25.q.out b/ql/src/test/results/clientpositive/perf/query25.q.out
index 7e15c26..2b1c064 100644
--- a/ql/src/test/results/clientpositive/perf/query25.q.out
+++ b/ql/src/test/results/clientpositive/perf/query25.q.out
@@ -6,14 +6,14 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 17 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 15 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 9 <- Map 13 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 7 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 9 <- Map 7 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -36,7 +36,7 @@ Stage-0
                     Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(_col18)","sum(_col23)","sum(_col3)"],keys:_col28, _col29, _col8, _col9
                     Merge Join Operator [MERGEJOIN_100] (rows=1020411534 width=88)
                       Conds:RS_43._col14=RS_44._col0(Inner),Output:["_col3","_col8","_col9","_col18","_col23","_col28","_col29"]
-                    <-Map 17 [SIMPLE_EDGE]
+                    <-Map 15 [SIMPLE_EDGE]
                       SHUFFLE [RS_44]
                         PartitionCols:_col0
                         Select Operator [SEL_36] (rows=462000 width=1436)
@@ -50,14 +50,14 @@ Stage-0
                         PartitionCols:_col14
                         Merge Join Operator [MERGEJOIN_99] (rows=927646829 width=88)
                           Conds:RS_40._col1, _col2=RS_41._col14, _col13(Inner),Output:["_col3","_col8","_col9","_col14","_col18","_col23"]
-                        <-Reducer 12 [SIMPLE_EDGE]
+                        <-Reducer 10 [SIMPLE_EDGE]
                           SHUFFLE [RS_41]
                             PartitionCols:_col14, _col13
                             Select Operator [SEL_33] (rows=843315281 width=88)
                               Output:["_col1","_col2","_col7","_col11","_col13","_col14","_col16"]
                               Merge Join Operator [MERGEJOIN_98] (rows=843315281 width=88)
                                 Conds:RS_30._col3=RS_31._col0(Inner),Output:["_col1","_col5","_col7","_col8","_col10","_col18","_col19"]
-                              <-Map 16 [SIMPLE_EDGE]
+                              <-Map 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_31]
                                   PartitionCols:_col0
                                   Select Operator [SEL_20] (rows=1704 width=1910)
@@ -66,39 +66,47 @@ Stage-0
                                       predicate:s_store_sk is not null
                                       TableScan [TS_18] (rows=1704 width=1910)
                                         default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_id","s_store_name"]
-                              <-Reducer 11 [SIMPLE_EDGE]
+                              <-Reducer 9 [SIMPLE_EDGE]
                                 SHUFFLE [RS_30]
                                   PartitionCols:_col3
                                   Merge Join Operator [MERGEJOIN_97] (rows=766650239 width=88)
                                     Conds:RS_27._col6=RS_28._col0(Inner),Output:["_col1","_col3","_col5","_col7","_col8","_col10"]
-                                  <-Map 15 [SIMPLE_EDGE]
+                                  <-Map 7 [SIMPLE_EDGE]
                                     SHUFFLE [RS_28]
                                       PartitionCols:_col0
                                       Select Operator [SEL_17] (rows=4058 width=1119)
                                         Output:["_col0"]
                                         Filter Operator [FIL_91] (rows=4058 width=1119)
                                           predicate:(d_moy BETWEEN 4 AND 10 and (d_year = 1998) and d_date_sk is not null)
-                                          TableScan [TS_15] (rows=73049 width=1119)
-                                            default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                                  <-Reducer 10 [SIMPLE_EDGE]
+                                          TableScan [TS_3] (rows=73049 width=1119)
+                                            default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                  <-Reducer 8 [SIMPLE_EDGE]
                                     SHUFFLE [RS_27]
                                       PartitionCols:_col6
                                       Merge Join Operator [MERGEJOIN_96] (rows=696954748 width=88)
                                         Conds:RS_24._col0=RS_25._col0(Inner),Output:["_col1","_col3","_col5","_col6","_col7","_col8","_col10"]
-                                      <-Map 14 [SIMPLE_EDGE]
+                                      <-Map 7 [SIMPLE_EDGE]
                                         SHUFFLE [RS_25]
                                           PartitionCols:_col0
                                           Select Operator [SEL_14] (rows=18262 width=1119)
                                             Output:["_col0"]
                                             Filter Operator [FIL_90] (rows=18262 width=1119)
                                               predicate:((d_moy = 4) and (d_year = 1998) and d_date_sk is not null)
-                                              TableScan [TS_12] (rows=73049 width=1119)
-                                                default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                                      <-Reducer 9 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_3]
+                                      <-Reducer 12 [SIMPLE_EDGE]
                                         SHUFFLE [RS_24]
                                           PartitionCols:_col0
                                           Merge Join Operator [MERGEJOIN_95] (rows=633595212 width=88)
                                             Conds:RS_21._col1, _col2, _col4=RS_22._col1, _col2, _col3(Inner),Output:["_col0","_col1","_col3","_col5","_col6","_col7","_col8","_col10"]
+                                          <-Map 11 [SIMPLE_EDGE]
+                                            SHUFFLE [RS_21]
+                                              PartitionCols:_col1, _col2, _col4
+                                              Select Operator [SEL_8] (rows=575995635 width=88)
+                                                Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
+                                                Filter Operator [FIL_88] (rows=575995635 width=88)
+                                                  predicate:(ss_item_sk is not null and ss_customer_sk is not null and ss_ticket_number is not null and ss_sold_date_sk is not null and ss_store_sk is not null)
+                                                  TableScan [TS_6] (rows=575995635 width=88)
+                                                    default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_store_sk","ss_ticket_number","ss_net_profit"]
                                           <-Map 13 [SIMPLE_EDGE]
                                             SHUFFLE [RS_22]
                                               PartitionCols:_col1, _col2, _col3
@@ -108,20 +116,19 @@ Stage-0
                                                   predicate:(sr_item_sk is not null and sr_customer_sk is not null and sr_ticket_number is not null and sr_returned_date_sk is not null)
                                                   TableScan [TS_9] (rows=57591150 width=77)
                                                     default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_item_sk","sr_customer_sk","sr_ticket_number","sr_net_loss"]
-                                          <-Map 8 [SIMPLE_EDGE]
-                                            SHUFFLE [RS_21]
-                                              PartitionCols:_col1, _col2, _col4
-                                              Select Operator [SEL_8] (rows=575995635 width=88)
-                                                Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                                                Filter Operator [FIL_88] (rows=575995635 width=88)
-                                                  predicate:(ss_item_sk is not null and ss_customer_sk is not null and ss_ticket_number is not null and ss_sold_date_sk is not null and ss_store_sk is not null)
-                                                  TableScan [TS_6] (rows=575995635 width=88)
-                                                    default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_store_sk","ss_ticket_number","ss_net_profit"]
                         <-Reducer 2 [SIMPLE_EDGE]
                           SHUFFLE [RS_40]
                             PartitionCols:_col1, _col2
                             Merge Join Operator [MERGEJOIN_94] (rows=316788826 width=135)
                               Conds:RS_37._col0=RS_38._col0(Inner),Output:["_col1","_col2","_col3"]
+                            <-Map 7 [SIMPLE_EDGE]
+                              SHUFFLE [RS_38]
+                                PartitionCols:_col0
+                                Select Operator [SEL_5] (rows=4058 width=1119)
+                                  Output:["_col0"]
+                                  Filter Operator [FIL_87] (rows=4058 width=1119)
+                                    predicate:(d_moy BETWEEN 4 AND 10 and (d_year = 1998) and d_date_sk is not null)
+                                     Please refer to the previous TableScan [TS_3]
                             <-Map 1 [SIMPLE_EDGE]
                               SHUFFLE [RS_37]
                                 PartitionCols:_col0
@@ -131,13 +138,4 @@ Stage-0
                                     predicate:(cs_bill_customer_sk is not null and cs_item_sk is not null and cs_sold_date_sk is not null)
                                     TableScan [TS_0] (rows=287989836 width=135)
                                       default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_customer_sk","cs_item_sk","cs_net_profit"]
-                            <-Map 7 [SIMPLE_EDGE]
-                              SHUFFLE [RS_38]
-                                PartitionCols:_col0
-                                Select Operator [SEL_5] (rows=4058 width=1119)
-                                  Output:["_col0"]
-                                  Filter Operator [FIL_87] (rows=4058 width=1119)
-                                    predicate:(d_moy BETWEEN 4 AND 10 and (d_year = 1998) and d_date_sk is not null)
-                                    TableScan [TS_3] (rows=73049 width=1119)
-                                      default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query28.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query28.q.out b/ql/src/test/results/clientpositive/perf/query28.q.out
index f7c5225..78129cf 100644
--- a/ql/src/test/results/clientpositive/perf/query28.q.out
+++ b/ql/src/test/results/clientpositive/perf/query28.q.out
@@ -104,13 +104,13 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE)
-Reducer 13 <- Map 12 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 13 (CUSTOM_SIMPLE_EDGE), Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
-Reducer 5 <- Map 4 (SIMPLE_EDGE)
-Reducer 7 <- Map 6 (SIMPLE_EDGE)
-Reducer 9 <- Map 8 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+Reducer 4 <- Map 1 (SIMPLE_EDGE)
+Reducer 5 <- Map 1 (SIMPLE_EDGE)
+Reducer 6 <- Map 1 (SIMPLE_EDGE)
+Reducer 7 <- Map 1 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -124,34 +124,6 @@ Stage-0
             Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17"]
             Merge Join Operator [MERGEJOIN_58] (rows=1 width=2497)
               Conds:(Inner),(Inner),(Inner),(Inner),(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17"]
-            <-Reducer 11 [CUSTOM_SIMPLE_EDGE]
-              PARTITION_ONLY_SHUFFLE [RS_46]
-                Group By Operator [GBY_33] (rows=1 width=416)
-                  Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
-                <-Map 10 [SIMPLE_EDGE]
-                  SHUFFLE [RS_32]
-                    Group By Operator [GBY_31] (rows=21333171 width=88)
-                      Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
-                      Select Operator [SEL_30] (rows=21333171 width=88)
-                        Output:["ss_list_price"]
-                        Filter Operator [FIL_56] (rows=21333171 width=88)
-                          predicate:(ss_quantity BETWEEN 11 AND 15 and (ss_list_price BETWEEN 66 AND 76 or ss_coupon_amt BETWEEN 920 AND 1920 or ss_wholesale_cost BETWEEN 4 AND 24))
-                          TableScan [TS_28] (rows=575995635 width=88)
-                            default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-            <-Reducer 13 [CUSTOM_SIMPLE_EDGE]
-              PARTITION_ONLY_SHUFFLE [RS_47]
-                Group By Operator [GBY_40] (rows=1 width=416)
-                  Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
-                <-Map 12 [SIMPLE_EDGE]
-                  SHUFFLE [RS_39]
-                    Group By Operator [GBY_38] (rows=21333171 width=88)
-                      Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
-                      Select Operator [SEL_37] (rows=21333171 width=88)
-                        Output:["ss_list_price"]
-                        Filter Operator [FIL_57] (rows=21333171 width=88)
-                          predicate:(ss_quantity BETWEEN 6 AND 10 and (ss_list_price BETWEEN 91 AND 101 or ss_coupon_amt BETWEEN 1430 AND 2430 or ss_wholesale_cost BETWEEN 32 AND 52))
-                          TableScan [TS_35] (rows=575995635 width=88)
-                            default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
             <-Reducer 2 [CUSTOM_SIMPLE_EDGE]
               PARTITION_ONLY_SHUFFLE [RS_42]
                 Group By Operator [GBY_5] (rows=1 width=416)
@@ -166,11 +138,11 @@ Stage-0
                           predicate:(ss_quantity BETWEEN 0 AND 5 and (ss_list_price BETWEEN 11 AND 21 or ss_coupon_amt BETWEEN 460 AND 1460 or ss_wholesale_cost BETWEEN 14 AND 34))
                           TableScan [TS_0] (rows=575995635 width=88)
                             default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-            <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
+            <-Reducer 4 [CUSTOM_SIMPLE_EDGE]
               PARTITION_ONLY_SHUFFLE [RS_43]
                 Group By Operator [GBY_12] (rows=1 width=416)
                   Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
-                <-Map 4 [SIMPLE_EDGE]
+                <-Map 1 [SIMPLE_EDGE]
                   SHUFFLE [RS_11]
                     Group By Operator [GBY_10] (rows=21333171 width=88)
                       Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
@@ -178,13 +150,12 @@ Stage-0
                         Output:["ss_list_price"]
                         Filter Operator [FIL_53] (rows=21333171 width=88)
                           predicate:(ss_quantity BETWEEN 26 AND 30 and (ss_list_price BETWEEN 28 AND 38 or ss_coupon_amt BETWEEN 2513 AND 3513 or ss_wholesale_cost BETWEEN 42 AND 62))
-                          TableScan [TS_7] (rows=575995635 width=88)
-                            default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-            <-Reducer 7 [CUSTOM_SIMPLE_EDGE]
+                           Please refer to the previous TableScan [TS_0]
+            <-Reducer 5 [CUSTOM_SIMPLE_EDGE]
               PARTITION_ONLY_SHUFFLE [RS_44]
                 Group By Operator [GBY_19] (rows=1 width=416)
                   Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
-                <-Map 6 [SIMPLE_EDGE]
+                <-Map 1 [SIMPLE_EDGE]
                   SHUFFLE [RS_18]
                     Group By Operator [GBY_17] (rows=21333171 width=88)
                       Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
@@ -192,13 +163,12 @@ Stage-0
                         Output:["ss_list_price"]
                         Filter Operator [FIL_54] (rows=21333171 width=88)
                           predicate:(ss_quantity BETWEEN 21 AND 25 and (ss_list_price BETWEEN 135 AND 145 or ss_coupon_amt BETWEEN 14180 AND 15180 or ss_wholesale_cost BETWEEN 38 AND 58))
-                          TableScan [TS_14] (rows=575995635 width=88)
-                            default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-            <-Reducer 9 [CUSTOM_SIMPLE_EDGE]
+                           Please refer to the previous TableScan [TS_0]
+            <-Reducer 6 [CUSTOM_SIMPLE_EDGE]
               PARTITION_ONLY_SHUFFLE [RS_45]
                 Group By Operator [GBY_26] (rows=1 width=416)
                   Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
-                <-Map 8 [SIMPLE_EDGE]
+                <-Map 1 [SIMPLE_EDGE]
                   SHUFFLE [RS_25]
                     Group By Operator [GBY_24] (rows=21333171 width=88)
                       Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
@@ -206,6 +176,31 @@ Stage-0
                         Output:["ss_list_price"]
                         Filter Operator [FIL_55] (rows=21333171 width=88)
                           predicate:(ss_quantity BETWEEN 16 AND 20 and (ss_list_price BETWEEN 142 AND 152 or ss_coupon_amt BETWEEN 3054 AND 4054 or ss_wholesale_cost BETWEEN 80 AND 100))
-                          TableScan [TS_21] (rows=575995635 width=88)
-                            default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
+                           Please refer to the previous TableScan [TS_0]
+            <-Reducer 7 [CUSTOM_SIMPLE_EDGE]
+              PARTITION_ONLY_SHUFFLE [RS_46]
+                Group By Operator [GBY_33] (rows=1 width=416)
+                  Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
+                <-Map 1 [SIMPLE_EDGE]
+                  SHUFFLE [RS_32]
+                    Group By Operator [GBY_31] (rows=21333171 width=88)
+                      Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
+                      Select Operator [SEL_30] (rows=21333171 width=88)
+                        Output:["ss_list_price"]
+                        Filter Operator [FIL_56] (rows=21333171 width=88)
+                          predicate:(ss_quantity BETWEEN 11 AND 15 and (ss_list_price BETWEEN 66 AND 76 or ss_coupon_amt BETWEEN 920 AND 1920 or ss_wholesale_cost BETWEEN 4 AND 24))
+                           Please refer to the previous TableScan [TS_0]
+            <-Reducer 8 [CUSTOM_SIMPLE_EDGE]
+              PARTITION_ONLY_SHUFFLE [RS_47]
+                Group By Operator [GBY_40] (rows=1 width=416)
+                  Output:["_col0","_col1","_col2"],aggregations:["avg(VALUE._col0)","count(VALUE._col1)","count(DISTINCT KEY._col0:0._col0)"]
+                <-Map 1 [SIMPLE_EDGE]
+                  SHUFFLE [RS_39]
+                    Group By Operator [GBY_38] (rows=21333171 width=88)
+                      Output:["_col0","_col1","_col2","_col3"],aggregations:["avg(ss_list_price)","count(ss_list_price)","count(DISTINCT ss_list_price)"],keys:ss_list_price
+                      Select Operator [SEL_37] (rows=21333171 width=88)
+                        Output:["ss_list_price"]
+                        Filter Operator [FIL_57] (rows=21333171 width=88)
+                          predicate:(ss_quantity BETWEEN 6 AND 10 and (ss_list_price BETWEEN 91 AND 101 or ss_coupon_amt BETWEEN 1430 AND 2430 or ss_wholesale_cost BETWEEN 32 AND 52))
+                           Please refer to the previous TableScan [TS_0]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query29.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query29.q.out b/ql/src/test/results/clientpositive/perf/query29.q.out
index 675bdd3..ec303d3 100644
--- a/ql/src/test/results/clientpositive/perf/query29.q.out
+++ b/ql/src/test/results/clientpositive/perf/query29.q.out
@@ -6,11 +6,11 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 11 <- Map 14 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
+Reducer 12 <- Map 15 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 17 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 4 <- Map 16 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 Reducer 9 <- Map 13 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
@@ -36,7 +36,7 @@ Stage-0
                     Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(_col17)","sum(_col22)","sum(_col3)"],keys:_col27, _col28, _col7, _col8
                     Merge Join Operator [MERGEJOIN_100] (rows=1020411534 width=88)
                       Conds:RS_43._col13=RS_44._col0(Inner),Output:["_col3","_col7","_col8","_col17","_col22","_col27","_col28"]
-                    <-Map 17 [SIMPLE_EDGE]
+                    <-Map 16 [SIMPLE_EDGE]
                       SHUFFLE [RS_44]
                         PartitionCols:_col0
                         Select Operator [SEL_36] (rows=462000 width=1436)
@@ -57,7 +57,7 @@ Stage-0
                               Output:["_col1","_col2","_col7","_col11","_col13","_col14","_col16"]
                               Merge Join Operator [MERGEJOIN_98] (rows=843315281 width=88)
                                 Conds:RS_30._col3=RS_31._col0(Inner),Output:["_col1","_col5","_col7","_col8","_col10","_col18","_col19"]
-                              <-Map 16 [SIMPLE_EDGE]
+                              <-Map 15 [SIMPLE_EDGE]
                                 SHUFFLE [RS_31]
                                   PartitionCols:_col0
                                   Select Operator [SEL_20] (rows=1704 width=1910)
@@ -71,15 +71,15 @@ Stage-0
                                   PartitionCols:_col3
                                   Merge Join Operator [MERGEJOIN_97] (rows=766650239 width=88)
                                     Conds:RS_27._col6=RS_28._col0(Inner),Output:["_col1","_col3","_col5","_col7","_col8","_col10"]
-                                  <-Map 15 [SIMPLE_EDGE]
+                                  <-Map 14 [SIMPLE_EDGE]
                                     SHUFFLE [RS_28]
                                       PartitionCols:_col0
                                       Select Operator [SEL_17] (rows=4058 width=1119)
                                         Output:["_col0"]
                                         Filter Operator [FIL_91] (rows=4058 width=1119)
                                           predicate:(d_moy BETWEEN 2 AND 5 and (d_year = 2000) and d_date_sk is not null)
-                                          TableScan [TS_15] (rows=73049 width=1119)
-                                            default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                          TableScan [TS_12] (rows=73049 width=1119)
+                                            default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                                   <-Reducer 10 [SIMPLE_EDGE]
                                     SHUFFLE [RS_27]
                                       PartitionCols:_col6
@@ -92,8 +92,7 @@ Stage-0
                                             Output:["_col0"]
                                             Filter Operator [FIL_90] (rows=18262 width=1119)
                                               predicate:((d_moy = 2) and (d_year = 2000) and d_date_sk is not null)
-                                              TableScan [TS_12] (rows=73049 width=1119)
-                                                default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                               Please refer to the previous TableScan [TS_12]
                                       <-Reducer 9 [SIMPLE_EDGE]
                                         SHUFFLE [RS_24]
                                           PartitionCols:_col0

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query30.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query30.q.out b/ql/src/test/results/clientpositive/perf/query30.q.out
index 4b7d949..abe92b5 100644
--- a/ql/src/test/results/clientpositive/perf/query30.q.out
+++ b/ql/src/test/results/clientpositive/perf/query30.q.out
@@ -59,16 +59,16 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 16 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 14 <- Map 13 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE)
-Reducer 15 <- Map 18 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
-Reducer 16 <- Reducer 15 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
+Reducer 13 <- Map 11 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 7 <- Map 11 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
-Reducer 8 <- Map 12 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+Reducer 6 <- Map 5 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Reducer 10 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 9 <- Map 5 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -90,12 +90,35 @@ Stage-0
                     Output:["_col2","_col6","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20"]
                     Merge Join Operator [MERGEJOIN_105] (rows=96800003 width=860)
                       Conds:RS_58._col0=RS_59._col0(Inner),Output:["_col1","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col18","_col19","_col20"]
-                    <-Reducer 10 [SIMPLE_EDGE]
+                    <-Reducer 2 [SIMPLE_EDGE]
+                      SHUFFLE [RS_58]
+                        PartitionCols:_col0
+                        Merge Join Operator [MERGEJOIN_99] (rows=88000001 width=860)
+                          Conds:RS_55._col2=RS_56._col0(Inner),Output:["_col0","_col1","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13"]
+                        <-Map 5 [SIMPLE_EDGE]
+                          SHUFFLE [RS_56]
+                            PartitionCols:_col0
+                            Select Operator [SEL_5] (rows=20000000 width=1014)
+                              Output:["_col0"]
+                              Filter Operator [FIL_92] (rows=20000000 width=1014)
+                                predicate:((ca_state = 'IL') and ca_address_sk is not null)
+                                TableScan [TS_3] (rows=40000000 width=1014)
+                                  default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
+                        <-Map 1 [SIMPLE_EDGE]
+                          SHUFFLE [RS_55]
+                            PartitionCols:_col2
+                            Select Operator [SEL_2] (rows=80000000 width=860)
+                              Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13"]
+                              Filter Operator [FIL_91] (rows=80000000 width=860)
+                                predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
+                                TableScan [TS_0] (rows=80000000 width=860)
+                                  default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_customer_id","c_current_addr_sk","c_salutation","c_first_name","c_last_name","c_preferred_cust_flag","c_birth_day","c_birth_month","c_birth_year","c_birth_country","c_login","c_email_address","c_last_review_date"]
+                    <-Reducer 8 [SIMPLE_EDGE]
                       SHUFFLE [RS_59]
                         PartitionCols:_col0
                         Merge Join Operator [MERGEJOIN_104] (rows=24200000 width=1014)
                           Conds:RS_51._col1=RS_52._col2(Left Outer),Output:["_col0","_col2","_col3","_col4"]
-                        <-Reducer 16 [SIMPLE_EDGE]
+                        <-Reducer 10 [SIMPLE_EDGE]
                           SHUFFLE [RS_52]
                             PartitionCols:_col2
                             Select Operator [SEL_50] (rows=11000000 width=1014)
@@ -106,112 +129,85 @@ Stage-0
                                   Output:["_col0","_col2"]
                                   Group By Operator [GBY_44] (rows=22000000 width=1014)
                                     Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                                  <-Reducer 15 [SIMPLE_EDGE]
+                                  <-Reducer 9 [SIMPLE_EDGE]
                                     SHUFFLE [RS_43]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_42] (rows=44000000 width=1014)
                                         Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col7, _col1
                                         Merge Join Operator [MERGEJOIN_103] (rows=44000000 width=1014)
                                           Conds:RS_38._col2=RS_39._col0(Inner),Output:["_col1","_col3","_col7"]
-                                        <-Map 18 [SIMPLE_EDGE]
+                                        <-Map 5 [SIMPLE_EDGE]
                                           SHUFFLE [RS_39]
                                             PartitionCols:_col0
                                             Select Operator [SEL_34] (rows=40000000 width=1014)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_98] (rows=40000000 width=1014)
                                                 predicate:(ca_address_sk is not null and ca_state is not null)
-                                                TableScan [TS_32] (rows=40000000 width=1014)
-                                                  default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
-                                        <-Reducer 14 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Reducer 13 [SIMPLE_EDGE]
                                           SHUFFLE [RS_38]
                                             PartitionCols:_col2
                                             Merge Join Operator [MERGEJOIN_102] (rows=15838314 width=92)
                                               Conds:RS_35._col0=RS_36._col0(Inner),Output:["_col1","_col2","_col3"]
-                                            <-Map 13 [SIMPLE_EDGE]
+                                            <-Map 11 [SIMPLE_EDGE]
                                               SHUFFLE [RS_35]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_28] (rows=14398467 width=92)
                                                   Output:["_col0","_col1","_col2","_col3"]
                                                   Filter Operator [FIL_96] (rows=14398467 width=92)
                                                     predicate:(wr_returned_date_sk is not null and wr_returning_addr_sk is not null)
-                                                    TableScan [TS_26] (rows=14398467 width=92)
+                                                    TableScan [TS_6] (rows=14398467 width=92)
                                                       default@web_returns,web_returns,Tbl:COMPLETE,Col:NONE,Output:["wr_returned_date_sk","wr_returning_customer_sk","wr_returning_addr_sk","wr_return_amt"]
-                                            <-Map 17 [SIMPLE_EDGE]
+                                            <-Map 14 [SIMPLE_EDGE]
                                               SHUFFLE [RS_36]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_31] (rows=36524 width=1119)
                                                   Output:["_col0"]
                                                   Filter Operator [FIL_97] (rows=36524 width=1119)
                                                     predicate:((d_year = 2002) and d_date_sk is not null)
-                                                    TableScan [TS_29] (rows=73049 width=1119)
+                                                    TableScan [TS_9] (rows=73049 width=1119)
                                                       default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                        <-Reducer 9 [SIMPLE_EDGE]
+                        <-Reducer 7 [SIMPLE_EDGE]
                           SHUFFLE [RS_51]
                             PartitionCols:_col1
                             Select Operator [SEL_25] (rows=22000000 width=1014)
                               Output:["_col0","_col1","_col2"]
                               Group By Operator [GBY_24] (rows=22000000 width=1014)
                                 Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                              <-Reducer 8 [SIMPLE_EDGE]
+                              <-Reducer 6 [SIMPLE_EDGE]
                                 SHUFFLE [RS_23]
                                   PartitionCols:_col0, _col1
                                   Group By Operator [GBY_22] (rows=44000000 width=1014)
                                     Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col7, _col1
                                     Merge Join Operator [MERGEJOIN_101] (rows=44000000 width=1014)
                                       Conds:RS_18._col2=RS_19._col0(Inner),Output:["_col1","_col3","_col7"]
-                                    <-Map 12 [SIMPLE_EDGE]
+                                    <-Map 5 [SIMPLE_EDGE]
                                       SHUFFLE [RS_19]
                                         PartitionCols:_col0
                                         Select Operator [SEL_14] (rows=40000000 width=1014)
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_95] (rows=40000000 width=1014)
                                             predicate:ca_address_sk is not null
-                                            TableScan [TS_12] (rows=40000000 width=1014)
-                                              default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
-                                    <-Reducer 7 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_3]
+                                    <-Reducer 12 [SIMPLE_EDGE]
                                       SHUFFLE [RS_18]
                                         PartitionCols:_col2
                                         Merge Join Operator [MERGEJOIN_100] (rows=15838314 width=92)
                                           Conds:RS_15._col0=RS_16._col0(Inner),Output:["_col1","_col2","_col3"]
                                         <-Map 11 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_16]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_11] (rows=36524 width=1119)
-                                              Output:["_col0"]
-                                              Filter Operator [FIL_94] (rows=36524 width=1119)
-                                                predicate:((d_year = 2002) and d_date_sk is not null)
-                                                TableScan [TS_9] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                        <-Map 6 [SIMPLE_EDGE]
                                           SHUFFLE [RS_15]
                                             PartitionCols:_col0
                                             Select Operator [SEL_8] (rows=14398467 width=92)
                                               Output:["_col0","_col1","_col2","_col3"]
                                               Filter Operator [FIL_93] (rows=14398467 width=92)
                                                 predicate:(wr_returned_date_sk is not null and wr_returning_addr_sk is not null and wr_returning_customer_sk is not null)
-                                                TableScan [TS_6] (rows=14398467 width=92)
-                                                  default@web_returns,web_returns,Tbl:COMPLETE,Col:NONE,Output:["wr_returned_date_sk","wr_returning_customer_sk","wr_returning_addr_sk","wr_return_amt"]
-                    <-Reducer 2 [SIMPLE_EDGE]
-                      SHUFFLE [RS_58]
-                        PartitionCols:_col0
-                        Merge Join Operator [MERGEJOIN_99] (rows=88000001 width=860)
-                          Conds:RS_55._col2=RS_56._col0(Inner),Output:["_col0","_col1","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13"]
-                        <-Map 1 [SIMPLE_EDGE]
-                          SHUFFLE [RS_55]
-                            PartitionCols:_col2
-                            Select Operator [SEL_2] (rows=80000000 width=860)
-                              Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13"]
-                              Filter Operator [FIL_91] (rows=80000000 width=860)
-                                predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
-                                TableScan [TS_0] (rows=80000000 width=860)
-                                  default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_customer_id","c_current_addr_sk","c_salutation","c_first_name","c_last_name","c_preferred_cust_flag","c_birth_day","c_birth_month","c_birth_year","c_birth_country","c_login","c_email_address","c_last_review_date"]
-                        <-Map 5 [SIMPLE_EDGE]
-                          SHUFFLE [RS_56]
-                            PartitionCols:_col0
-                            Select Operator [SEL_5] (rows=20000000 width=1014)
-                              Output:["_col0"]
-                              Filter Operator [FIL_92] (rows=20000000 width=1014)
-                                predicate:((ca_state = 'IL') and ca_address_sk is not null)
-                                TableScan [TS_3] (rows=40000000 width=1014)
-                                  default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
+                                                 Please refer to the previous TableScan [TS_6]
+                                        <-Map 14 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_16]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_11] (rows=36524 width=1119)
+                                              Output:["_col0"]
+                                              Filter Operator [FIL_94] (rows=36524 width=1119)
+                                                predicate:((d_year = 2002) and d_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_9]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query31.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query31.q.out b/ql/src/test/results/clientpositive/perf/query31.q.out
index 3ed312d..9e3dad4 100644
--- a/ql/src/test/results/clientpositive/perf/query31.q.out
+++ b/ql/src/test/results/clientpositive/perf/query31.q.out
@@ -5,27 +5,27 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 13 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 14 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
+Reducer 10 <- Map 1 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
+Reducer 11 <- Map 24 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
 Reducer 12 <- Reducer 11 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Map 19 (SIMPLE_EDGE)
-Reducer 17 <- Map 20 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 26 (SIMPLE_EDGE)
-Reducer 23 <- Map 27 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 24 <- Reducer 23 (SIMPLE_EDGE)
-Reducer 25 <- Reducer 24 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE), Reducer 37 (SIMPLE_EDGE)
-Reducer 29 <- Map 28 (SIMPLE_EDGE), Map 32 (SIMPLE_EDGE)
-Reducer 3 <- Map 8 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 30 <- Map 33 (SIMPLE_EDGE), Reducer 29 (SIMPLE_EDGE)
-Reducer 31 <- Reducer 30 (SIMPLE_EDGE)
-Reducer 35 <- Map 34 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
-Reducer 36 <- Map 39 (SIMPLE_EDGE), Reducer 35 (SIMPLE_EDGE)
-Reducer 37 <- Reducer 36 (SIMPLE_EDGE)
+Reducer 14 <- Map 13 (SIMPLE_EDGE), Map 25 (SIMPLE_EDGE)
+Reducer 15 <- Map 24 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+Reducer 16 <- Reducer 15 (SIMPLE_EDGE)
+Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
+Reducer 18 <- Map 13 (SIMPLE_EDGE), Map 25 (SIMPLE_EDGE)
+Reducer 19 <- Map 24 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
+Reducer 20 <- Reducer 19 (SIMPLE_EDGE)
+Reducer 21 <- Map 13 (SIMPLE_EDGE), Map 25 (SIMPLE_EDGE)
+Reducer 22 <- Map 24 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 23 <- Reducer 22 (SIMPLE_EDGE)
+Reducer 3 <- Map 24 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Reducer 12 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 5 <- Reducer 12 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
+Reducer 7 <- Map 1 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
+Reducer 8 <- Map 24 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -44,125 +44,88 @@ Stage-0
                 Merge Join Operator [MERGEJOIN_269] (rows=1149975359 width=88)
                   Conds:RS_125._col0=RS_126._col0(Inner),RS_125._col0=RS_127._col0(Inner),RS_125._col0=RS_128._col0(Inner),Output:["_col0","_col1","_col3","_col5","_col7","_col9","_col11"]
                 <-Reducer 12 [SIMPLE_EDGE]
-                  SHUFFLE [RS_126]
-                    PartitionCols:_col0
-                    Group By Operator [GBY_38] (rows=348477374 width=88)
-                      Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                    <-Reducer 11 [SIMPLE_EDGE]
-                      SHUFFLE [RS_37]
-                        PartitionCols:_col0
-                        Group By Operator [GBY_36] (rows=696954748 width=88)
-                          Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
-                          Merge Join Operator [MERGEJOIN_259] (rows=696954748 width=88)
-                            Conds:RS_32._col1=RS_33._col0(Inner),Output:["_col2","_col7"]
-                          <-Map 14 [SIMPLE_EDGE]
-                            SHUFFLE [RS_33]
-                              PartitionCols:_col0
-                              Select Operator [SEL_28] (rows=40000000 width=1014)
-                                Output:["_col0","_col1"]
-                                Filter Operator [FIL_243] (rows=40000000 width=1014)
-                                  predicate:(ca_address_sk is not null and ca_county is not null)
-                                  TableScan [TS_26] (rows=40000000 width=1014)
-                                    default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
-                          <-Reducer 10 [SIMPLE_EDGE]
-                            SHUFFLE [RS_32]
-                              PartitionCols:_col1
-                              Merge Join Operator [MERGEJOIN_258] (rows=633595212 width=88)
-                                Conds:RS_29._col0=RS_30._col0(Inner),Output:["_col1","_col2"]
-                              <-Map 13 [SIMPLE_EDGE]
-                                SHUFFLE [RS_30]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_25] (rows=18262 width=1119)
-                                    Output:["_col0"]
-                                    Filter Operator [FIL_242] (rows=18262 width=1119)
-                                      predicate:((d_qoy = 1) and (d_year = 1998) and d_date_sk is not null)
-                                      TableScan [TS_23] (rows=73049 width=1119)
-                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                              <-Map 9 [SIMPLE_EDGE]
-                                SHUFFLE [RS_29]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_22] (rows=575995635 width=88)
-                                    Output:["_col0","_col1","_col2"]
-                                    Filter Operator [FIL_241] (rows=575995635 width=88)
-                                      predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null)
-                                      TableScan [TS_20] (rows=575995635 width=88)
-                                        default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_addr_sk","ss_ext_sales_price"]
-                <-Reducer 18 [SIMPLE_EDGE]
                   SHUFFLE [RS_127]
                     PartitionCols:_col0
                     Group By Operator [GBY_58] (rows=348477374 width=88)
                       Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                    <-Reducer 17 [SIMPLE_EDGE]
+                    <-Reducer 11 [SIMPLE_EDGE]
                       SHUFFLE [RS_57]
                         PartitionCols:_col0
                         Group By Operator [GBY_56] (rows=696954748 width=88)
                           Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
                           Merge Join Operator [MERGEJOIN_261] (rows=696954748 width=88)
                             Conds:RS_52._col1=RS_53._col0(Inner),Output:["_col2","_col7"]
-                          <-Map 20 [SIMPLE_EDGE]
+                          <-Map 24 [SIMPLE_EDGE]
                             SHUFFLE [RS_53]
                               PartitionCols:_col0
                               Select Operator [SEL_48] (rows=40000000 width=1014)
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_246] (rows=40000000 width=1014)
                                   predicate:(ca_address_sk is not null and ca_county is not null)
-                                  TableScan [TS_46] (rows=40000000 width=1014)
+                                  TableScan [TS_6] (rows=40000000 width=1014)
                                     default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
-                          <-Reducer 16 [SIMPLE_EDGE]
+                          <-Reducer 10 [SIMPLE_EDGE]
                             SHUFFLE [RS_52]
                               PartitionCols:_col1
                               Merge Join Operator [MERGEJOIN_260] (rows=633595212 width=88)
                                 Conds:RS_49._col0=RS_50._col0(Inner),Output:["_col1","_col2"]
-                              <-Map 15 [SIMPLE_EDGE]
-                                SHUFFLE [RS_49]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_42] (rows=575995635 width=88)
-                                    Output:["_col0","_col1","_col2"]
-                                    Filter Operator [FIL_244] (rows=575995635 width=88)
-                                      predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null)
-                                      TableScan [TS_40] (rows=575995635 width=88)
-                                        default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_addr_sk","ss_ext_sales_price"]
-                              <-Map 19 [SIMPLE_EDGE]
+                              <-Map 13 [SIMPLE_EDGE]
                                 SHUFFLE [RS_50]
                                   PartitionCols:_col0
                                   Select Operator [SEL_45] (rows=18262 width=1119)
                                     Output:["_col0"]
                                     Filter Operator [FIL_245] (rows=18262 width=1119)
                                       predicate:((d_qoy = 3) and (d_year = 1998) and d_date_sk is not null)
-                                      TableScan [TS_43] (rows=73049 width=1119)
+                                      TableScan [TS_3] (rows=73049 width=1119)
                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                <-Reducer 25 [SIMPLE_EDGE]
+                              <-Map 1 [SIMPLE_EDGE]
+                                SHUFFLE [RS_49]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_42] (rows=575995635 width=88)
+                                    Output:["_col0","_col1","_col2"]
+                                    Filter Operator [FIL_244] (rows=575995635 width=88)
+                                      predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null)
+                                      TableScan [TS_0] (rows=575995635 width=88)
+                                        default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_addr_sk","ss_ext_sales_price"]
+                <-Reducer 17 [SIMPLE_EDGE]
                   SHUFFLE [RS_128]
                     PartitionCols:_col0
                     Merge Join Operator [MERGEJOIN_268] (rows=191667561 width=135)
                       Conds:RS_120._col0=RS_121._col0(Inner),RS_120._col0=RS_122._col0(Inner),Output:["_col0","_col1","_col3","_col5"]
-                    <-Reducer 24 [SIMPLE_EDGE]
+                    <-Reducer 16 [SIMPLE_EDGE]
                       SHUFFLE [RS_120]
                         PartitionCols:_col0
                         Group By Operator [GBY_78] (rows=87121617 width=135)
                           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                        <-Reducer 23 [SIMPLE_EDGE]
+                        <-Reducer 15 [SIMPLE_EDGE]
                           SHUFFLE [RS_77]
                             PartitionCols:_col0
                             Group By Operator [GBY_76] (rows=174243235 width=135)
                               Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
                               Merge Join Operator [MERGEJOIN_263] (rows=174243235 width=135)
                                 Conds:RS_72._col1=RS_73._col0(Inner),Output:["_col2","_col7"]
-                              <-Map 27 [SIMPLE_EDGE]
+                              <-Map 24 [SIMPLE_EDGE]
                                 SHUFFLE [RS_73]
                                   PartitionCols:_col0
                                   Select Operator [SEL_68] (rows=40000000 width=1014)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_249] (rows=40000000 width=1014)
                                       predicate:(ca_address_sk is not null and ca_county is not null)
-                                      TableScan [TS_66] (rows=40000000 width=1014)
-                                        default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
-                              <-Reducer 22 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_6]
+                              <-Reducer 14 [SIMPLE_EDGE]
                                 SHUFFLE [RS_72]
                                   PartitionCols:_col1
                                   Merge Join Operator [MERGEJOIN_262] (rows=158402938 width=135)
                                     Conds:RS_69._col0=RS_70._col0(Inner),Output:["_col1","_col2"]
-                                  <-Map 21 [SIMPLE_EDGE]
+                                  <-Map 13 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_70]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_65] (rows=18262 width=1119)
+                                        Output:["_col0"]
+                                        Filter Operator [FIL_248] (rows=18262 width=1119)
+                                          predicate:((d_qoy = 1) and (d_year = 1998) and d_date_sk is not null)
+                                           Please refer to the previous TableScan [TS_3]
+                                  <-Map 25 [SIMPLE_EDGE]
                                     SHUFFLE [RS_69]
                                       PartitionCols:_col0
                                       Select Operator [SEL_62] (rows=144002668 width=135)
@@ -171,103 +134,88 @@ Stage-0
                                           predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null)
                                           TableScan [TS_60] (rows=144002668 width=135)
                                             default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                  <-Map 26 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_70]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_65] (rows=18262 width=1119)
-                                        Output:["_col0"]
-                                        Filter Operator [FIL_248] (rows=18262 width=1119)
-                                          predicate:((d_qoy = 1) and (d_year = 1998) and d_date_sk is not null)
-                                          TableScan [TS_63] (rows=73049 width=1119)
-                                            default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                    <-Reducer 31 [SIMPLE_EDGE]
+                    <-Reducer 20 [SIMPLE_EDGE]
                       SHUFFLE [RS_121]
                         PartitionCols:_col0
                         Group By Operator [GBY_98] (rows=87121617 width=135)
                           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                        <-Reducer 30 [SIMPLE_EDGE]
+                        <-Reducer 19 [SIMPLE_EDGE]
                           SHUFFLE [RS_97]
                             PartitionCols:_col0
                             Group By Operator [GBY_96] (rows=174243235 width=135)
                               Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
                               Merge Join Operator [MERGEJOIN_265] (rows=174243235 width=135)
                                 Conds:RS_92._col1=RS_93._col0(Inner),Output:["_col2","_col7"]
-                              <-Map 33 [SIMPLE_EDGE]
+                              <-Map 24 [SIMPLE_EDGE]
                                 SHUFFLE [RS_93]
                                   PartitionCols:_col0
                                   Select Operator [SEL_88] (rows=40000000 width=1014)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_252] (rows=40000000 width=1014)
                                       predicate:(ca_address_sk is not null and ca_county is not null)
-                                      TableScan [TS_86] (rows=40000000 width=1014)
-                                        default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
-                              <-Reducer 29 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_6]
+                              <-Reducer 18 [SIMPLE_EDGE]
                                 SHUFFLE [RS_92]
                                   PartitionCols:_col1
                                   Merge Join Operator [MERGEJOIN_264] (rows=158402938 width=135)
                                     Conds:RS_89._col0=RS_90._col0(Inner),Output:["_col1","_col2"]
-                                  <-Map 28 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_89]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_82] (rows=144002668 width=135)
-                                        Output:["_col0","_col1","_col2"]
-                                        Filter Operator [FIL_250] (rows=144002668 width=135)
-                                          predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null)
-                                          TableScan [TS_80] (rows=144002668 width=135)
-                                            default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                  <-Map 32 [SIMPLE_EDGE]
+                                  <-Map 13 [SIMPLE_EDGE]
                                     SHUFFLE [RS_90]
                                       PartitionCols:_col0
                                       Select Operator [SEL_85] (rows=18262 width=1119)
                                         Output:["_col0"]
                                         Filter Operator [FIL_251] (rows=18262 width=1119)
                                           predicate:((d_qoy = 2) and (d_year = 1998) and d_date_sk is not null)
-                                          TableScan [TS_83] (rows=73049 width=1119)
-                                            default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                    <-Reducer 37 [SIMPLE_EDGE]
+                                           Please refer to the previous TableScan [TS_3]
+                                  <-Map 25 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_89]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_82] (rows=144002668 width=135)
+                                        Output:["_col0","_col1","_col2"]
+                                        Filter Operator [FIL_250] (rows=144002668 width=135)
+                                          predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null)
+                                           Please refer to the previous TableScan [TS_60]
+                    <-Reducer 23 [SIMPLE_EDGE]
                       SHUFFLE [RS_122]
                         PartitionCols:_col0
                         Group By Operator [GBY_118] (rows=87121617 width=135)
                           Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                        <-Reducer 36 [SIMPLE_EDGE]
+                        <-Reducer 22 [SIMPLE_EDGE]
                           SHUFFLE [RS_117]
                             PartitionCols:_col0
                             Group By Operator [GBY_116] (rows=174243235 width=135)
                               Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
                               Merge Join Operator [MERGEJOIN_267] (rows=174243235 width=135)
                                 Conds:RS_112._col1=RS_113._col0(Inner),Output:["_col2","_col7"]
-                              <-Map 39 [SIMPLE_EDGE]
+                              <-Map 24 [SIMPLE_EDGE]
                                 SHUFFLE [RS_113]
                                   PartitionCols:_col0
                                   Select Operator [SEL_108] (rows=40000000 width=1014)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_255] (rows=40000000 width=1014)
                                       predicate:(ca_address_sk is not null and ca_county is not null)
-                                      TableScan [TS_106] (rows=40000000 width=1014)
-                                        default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
-                              <-Reducer 35 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_6]
+                              <-Reducer 21 [SIMPLE_EDGE]
                                 SHUFFLE [RS_112]
                                   PartitionCols:_col1
                                   Merge Join Operator [MERGEJOIN_266] (rows=158402938 width=135)
                                     Conds:RS_109._col0=RS_110._col0(Inner),Output:["_col1","_col2"]
-                                  <-Map 34 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_109]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_102] (rows=144002668 width=135)
-                                        Output:["_col0","_col1","_col2"]
-                                        Filter Operator [FIL_253] (rows=144002668 width=135)
-                                          predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null)
-                                          TableScan [TS_100] (rows=144002668 width=135)
-                                            default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                  <-Map 38 [SIMPLE_EDGE]
+                                  <-Map 13 [SIMPLE_EDGE]
                                     SHUFFLE [RS_110]
                                       PartitionCols:_col0
                                       Select Operator [SEL_105] (rows=18262 width=1119)
                                         Output:["_col0"]
                                         Filter Operator [FIL_254] (rows=18262 width=1119)
                                           predicate:((d_qoy = 3) and (d_year = 1998) and d_date_sk is not null)
-                                          TableScan [TS_103] (rows=73049 width=1119)
-                                            default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
+                                           Please refer to the previous TableScan [TS_3]
+                                  <-Map 25 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_109]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_102] (rows=144002668 width=135)
+                                        Output:["_col0","_col1","_col2"]
+                                        Filter Operator [FIL_253] (rows=144002668 width=135)
+                                          predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null)
+                                           Please refer to the previous TableScan [TS_60]
                 <-Reducer 4 [SIMPLE_EDGE]
                   SHUFFLE [RS_125]
                     PartitionCols:_col0
@@ -280,20 +228,27 @@ Stage-0
                           Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
                           Merge Join Operator [MERGEJOIN_257] (rows=696954748 width=88)
                             Conds:RS_12._col1=RS_13._col0(Inner),Output:["_col2","_col7"]
-                          <-Map 8 [SIMPLE_EDGE]
+                          <-Map 24 [SIMPLE_EDGE]
                             SHUFFLE [RS_13]
                               PartitionCols:_col0
                               Select Operator [SEL_8] (rows=40000000 width=1014)
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_240] (rows=40000000 width=1014)
                                   predicate:(ca_address_sk is not null and ca_county is not null)
-                                  TableScan [TS_6] (rows=40000000 width=1014)
-                                    default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_county"]
+                                   Please refer to the previous TableScan [TS_6]
                           <-Reducer 2 [SIMPLE_EDGE]
                             SHUFFLE [RS_12]
                               PartitionCols:_col1
                               Merge Join Operator [MERGEJOIN_256] (rows=633595212 width=88)
                                 Conds:RS_9._col0=RS_10._col0(Inner),Output:["_col1","_col2"]
+                              <-Map 13 [SIMPLE_EDGE]
+                                SHUFFLE [RS_10]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_5] (rows=18262 width=1119)
+                                    Output:["_col0"]
+                                    Filter Operator [FIL_239] (rows=18262 width=1119)
+                                      predicate:((d_qoy = 2) and (d_year = 1998) and d_date_sk is not null)
+                                       Please refer to the previous TableScan [TS_3]
                               <-Map 1 [SIMPLE_EDGE]
                                 SHUFFLE [RS_9]
                                   PartitionCols:_col0
@@ -301,15 +256,46 @@ Stage-0
                                     Output:["_col0","_col1","_col2"]
                                     Filter Operator [FIL_238] (rows=575995635 width=88)
                                       predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null)
-                                      TableScan [TS_0] (rows=575995635 width=88)
-                                        default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_addr_sk","ss_ext_sales_price"]
-                              <-Map 7 [SIMPLE_EDGE]
-                                SHUFFLE [RS_10]
+                                       Please refer to the previous TableScan [TS_0]
+                <-Reducer 9 [SIMPLE_EDGE]
+                  SHUFFLE [RS_126]
+                    PartitionCols:_col0
+                    Group By Operator [GBY_38] (rows=348477374 width=88)
+                      Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
+                    <-Reducer 8 [SIMPLE_EDGE]
+                      SHUFFLE [RS_37]
+                        PartitionCols:_col0
+                        Group By Operator [GBY_36] (rows=696954748 width=88)
+                          Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col7
+                          Merge Join Operator [MERGEJOIN_259] (rows=696954748 width=88)
+                            Conds:RS_32._col1=RS_33._col0(Inner),Output:["_col2","_col7"]
+                          <-Map 24 [SIMPLE_EDGE]
+                            SHUFFLE [RS_33]
+                              PartitionCols:_col0
+                              Select Operator [SEL_28] (rows=40000000 width=1014)
+                                Output:["_col0","_col1"]
+                                Filter Operator [FIL_243] (rows=40000000 width=1014)
+                                  predicate:(ca_address_sk is not null and ca_county is not null)
+                                   Please refer to the previous TableScan [TS_6]
+                          <-Reducer 7 [SIMPLE_EDGE]
+                            SHUFFLE [RS_32]
+                              PartitionCols:_col1
+                              Merge Join Operator [MERGEJOIN_258] (rows=633595212 width=88)
+                                Conds:RS_29._col0=RS_30._col0(Inner),Output:["_col1","_col2"]
+                              <-Map 13 [SIMPLE_EDGE]
+                                SHUFFLE [RS_30]
                                   PartitionCols:_col0
-                                  Select Operator [SEL_5] (rows=18262 width=1119)
+                                  Select Operator [SEL_25] (rows=18262 width=1119)
                                     Output:["_col0"]
-                                    Filter Operator [FIL_239] (rows=18262 width=1119)
-                                      predicate:((d_qoy = 2) and (d_year = 1998) and d_date_sk is not null)
-                                      TableScan [TS_3] (rows=73049 width=1119)
-                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
+                                    Filter Operator [FIL_242] (rows=18262 width=1119)
+                                      predicate:((d_qoy = 1) and (d_year = 1998) and d_date_sk is not null)
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Map 1 [SIMPLE_EDGE]
+                                SHUFFLE [RS_29]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_22] (rows=575995635 width=88)
+                                    Output:["_col0","_col1","_col2"]
+                                    Filter Operator [FIL_241] (rows=575995635 width=88)
+                                      predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null)
+                                       Please refer to the previous TableScan [TS_0]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query32.q.out b/ql/src/test/results/clientpositive/perf/query32.q.out
index 5a6514b..c9820b3 100644
--- a/ql/src/test/results/clientpositive/perf/query32.q.out
+++ b/ql/src/test/results/clientpositive/perf/query32.q.out
@@ -39,11 +39,11 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 7 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 4 <- Map 8 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
-Reducer 9 <- Map 10 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 6 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -63,7 +63,7 @@ Stage-0
                   predicate:(_col5 > _col1)
                   Merge Join Operator [MERGEJOIN_59] (rows=696935432 width=135)
                     Conds:RS_27._col0=RS_28._col0(Inner),RS_28._col0=RS_29._col0(Inner),Output:["_col1","_col5"]
-                  <-Map 7 [SIMPLE_EDGE]
+                  <-Map 8 [SIMPLE_EDGE]
                     SHUFFLE [RS_28]
                       PartitionCols:_col0
                       Select Operator [SEL_16] (rows=231000 width=1436)
@@ -95,7 +95,7 @@ Stage-0
                                       predicate:(cs_sold_date_sk is not null and cs_item_sk is not null)
                                       TableScan [TS_0] (rows=287989836 width=135)
                                         default@catalog_sales,cs,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_ext_discount_amt"]
-                              <-Map 6 [SIMPLE_EDGE]
+                              <-Map 7 [SIMPLE_EDGE]
                                 SHUFFLE [RS_7]
                                   PartitionCols:_col0
                                   Select Operator [SEL_5] (rows=8116 width=1119)
@@ -104,29 +104,27 @@ Stage-0
                                       predicate:(d_date BETWEEN '2000-01-27' AND '2000-04-27' and d_date_sk is not null)
                                       TableScan [TS_3] (rows=73049 width=1119)
                                         default@date_dim,d,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                  <-Reducer 9 [SIMPLE_EDGE]
+                  <-Reducer 6 [SIMPLE_EDGE]
                     SHUFFLE [RS_29]
                       PartitionCols:_col0
                       Select Operator [SEL_26] (rows=316788826 width=135)
                         Output:["_col0","_col1"]
                         Merge Join Operator [MERGEJOIN_58] (rows=316788826 width=135)
                           Conds:RS_23._col0=RS_24._col0(Inner),Output:["_col1","_col2"]
-                        <-Map 10 [SIMPLE_EDGE]
-                          SHUFFLE [RS_24]
-                            PartitionCols:_col0
-                            Select Operator [SEL_22] (rows=8116 width=1119)
-                              Output:["_col0"]
-                              Filter Operator [FIL_56] (rows=8116 width=1119)
-                                predicate:(d_date BETWEEN '2000-01-27' AND '2000-04-27' and d_date_sk is not null)
-                                TableScan [TS_20] (rows=73049 width=1119)
-                                  default@date_dim,d,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                        <-Map 8 [SIMPLE_EDGE]
+                        <-Map 1 [SIMPLE_EDGE]
                           SHUFFLE [RS_23]
                             PartitionCols:_col0
                             Select Operator [SEL_19] (rows=287989836 width=135)
                               Output:["_col0","_col1","_col2"]
                               Filter Operator [FIL_55] (rows=287989836 width=135)
                                 predicate:(cs_sold_date_sk is not null and cs_item_sk is not null)
-                                TableScan [TS_17] (rows=287989836 width=135)
-                                  default@catalog_sales,cs,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_ext_discount_amt"]
+                                 Please refer to the previous TableScan [TS_0]
+                        <-Map 7 [SIMPLE_EDGE]
+                          SHUFFLE [RS_24]
+                            PartitionCols:_col0
+                            Select Operator [SEL_22] (rows=8116 width=1119)
+                              Output:["_col0"]
+                              Filter Operator [FIL_56] (rows=8116 width=1119)
+                                predicate:(d_date BETWEEN '2000-01-27' AND '2000-04-27' and d_date_sk is not null)
+                                 Please refer to the previous TableScan [TS_3]
 


[30/50] [abbrv] hive git commit: HIVE-15725 : Make it possible to run checkstyle for a specific module (Peter Vary via Thejas Nair)

Posted by we...@apache.org.
HIVE-15725 : Make it possible to run checkstyle for a specific module (Peter Vary via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c90aa83f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c90aa83f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c90aa83f

Branch: refs/heads/hive-14535
Commit: c90aa83fbefe256288ef5df8a2d10b0e355f4795
Parents: 877bbf0
Author: Peter Vary <pv...@cloudera.com>
Authored: Sun May 14 19:32:20 2017 -0700
Committer: Thejas M Nair <th...@hortonworks.com>
Committed: Sun May 14 19:32:20 2017 -0700

----------------------------------------------------------------------
 checkstyle/checkstyle.xml | 4 ++--
 pom.xml                   | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/c90aa83f/checkstyle/checkstyle.xml
----------------------------------------------------------------------
diff --git a/checkstyle/checkstyle.xml b/checkstyle/checkstyle.xml
index 1c5b305..bd6b2f8 100644
--- a/checkstyle/checkstyle.xml
+++ b/checkstyle/checkstyle.xml
@@ -65,7 +65,7 @@
   <module name="Translation"/>
 
   <module name="Header">
-    <property name="headerFile" value="checkstyle/asf.header"/>
+    <property name="headerFile" value="${basedir}/asf.header"/>
   </module>
 
   <!-- Maximum file line length -->
@@ -78,7 +78,7 @@
 
   <!-- List of files to ignore -->
   <module name="SuppressionFilter">
-    <property name="file" value="checkstyle/suppressions.xml"/>
+    <property name="file" value="${basedir}/suppressions.xml"/>
   </module>
 
   <!-- Ignore JavaCC/JJTree files -->

http://git-wip-us.apache.org/repos/asf/hive/blob/c90aa83f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e0aae27..30fa50b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -71,7 +71,7 @@
     <maven.repo.local>${settings.localRepository}</maven.repo.local>
     <hive.path.to.root>.</hive.path.to.root>
     <hive.jdbc.driver.classifier>standalone</hive.jdbc.driver.classifier>
-    <checkstyle.conf.dir>${hive.path.to.root}/checkstyle</checkstyle.conf.dir>
+    <checkstyle.conf.dir>${basedir}/${hive.path.to.root}/checkstyle</checkstyle.conf.dir>
 
     <!-- Test Properties -->
     <test.extra.path></test.extra.path>
@@ -1056,6 +1056,7 @@
         <artifactId>maven-checkstyle-plugin</artifactId>
         <configuration>
           <configLocation>${checkstyle.conf.dir}/checkstyle.xml</configLocation>
+          <propertyExpansion>basedir=${checkstyle.conf.dir}</propertyExpansion>
         </configuration>
       </plugin>
       <plugin>
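
For context on HIVE-15725: checkstyle.xml now resolves its auxiliary files (asf.header, suppressions.xml) against ${basedir}, and the root pom points that property at the shared checkstyle directory through <propertyExpansion>. Assuming the standard maven-checkstyle-plugin goals, this should allow running the check from inside a single module, for example:

    cd metastore
    mvn checkstyle:check        # or checkstyle:checkstyle to only generate the report

The module name and goals above are illustrative; the commit itself only changes the configuration shown in the diff.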


[18/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
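
The q.out changes in this commit reflect the new shared-scan behavior: when several branches of a plan read the same table, the Tez plan now keeps a single TableScan and the EXPLAIN output for the other branches collapses into a "Please refer to the previous TableScan [TS_x]" reference, which is why the vertex numbering shrinks and the duplicated scan subtrees disappear from these golden files. A minimal sketch of a query shape that triggers this (illustrative only, not taken from the patch):

    -- Both branches read store_sales and date_dim; with shared scans each table
    -- is scanned once and the second branch refers back to the first TableScan.
    SELECT d.d_qoy, sum(ss.ss_ext_sales_price)
    FROM store_sales ss JOIN date_dim d ON ss.ss_sold_date_sk = d.d_date_sk
    WHERE d.d_year = 1998 AND d.d_qoy = 1
    GROUP BY d.d_qoy
    UNION ALL
    SELECT d.d_qoy, sum(ss.ss_ext_sales_price)
    FROM store_sales ss JOIN date_dim d ON ss.ss_sold_date_sk = d.d_date_sk
    WHERE d.d_year = 1998 AND d.d_qoy = 2
    GROUP BY d.d_qoy;
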
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query14.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query14.q.out b/ql/src/test/results/clientpositive/perf/query14.q.out
index d6675bc..58889c7 100644
--- a/ql/src/test/results/clientpositive/perf/query14.q.out
+++ b/ql/src/test/results/clientpositive/perf/query14.q.out
@@ -1,5 +1,5 @@
-Warning: Shuffle Join MERGEJOIN[908][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 112' is a cross product
-Warning: Shuffle Join MERGEJOIN[907][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 60' is a cross product
+Warning: Shuffle Join MERGEJOIN[908][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 16' is a cross product
+Warning: Shuffle Join MERGEJOIN[907][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 12' is a cross product
 Warning: Shuffle Join MERGEJOIN[906][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Reducer 5' is a cross product
 PREHOOK: query: explain
 with  cross_items as
@@ -210,80 +210,80 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 103 <- Map 102 (SIMPLE_EDGE), Map 106 (SIMPLE_EDGE)
-Reducer 104 <- Map 107 (SIMPLE_EDGE), Reducer 103 (SIMPLE_EDGE)
-Reducer 105 <- Reducer 104 (SIMPLE_EDGE), Union 92 (CONTAINS)
-Reducer 109 <- Map 108 (SIMPLE_EDGE), Map 113 (SIMPLE_EDGE), Union 110 (CONTAINS)
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE), Union 3 (CONTAINS)
-Reducer 111 <- Union 110 (CUSTOM_SIMPLE_EDGE)
-Reducer 112 <- Reducer 111 (CUSTOM_SIMPLE_EDGE), Reducer 123 (CUSTOM_SIMPLE_EDGE), Reducer 134 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 115 <- Map 114 (SIMPLE_EDGE), Map 116 (SIMPLE_EDGE), Union 110 (CONTAINS)
-Reducer 118 <- Map 117 (SIMPLE_EDGE), Map 119 (SIMPLE_EDGE), Union 110 (CONTAINS)
-Reducer 121 <- Map 120 (SIMPLE_EDGE), Map 124 (SIMPLE_EDGE), Union 122 (CONTAINS)
-Reducer 123 <- Union 122 (CUSTOM_SIMPLE_EDGE)
-Reducer 126 <- Map 125 (SIMPLE_EDGE), Map 127 (SIMPLE_EDGE), Union 122 (CONTAINS)
-Reducer 129 <- Map 128 (SIMPLE_EDGE), Map 130 (SIMPLE_EDGE), Union 122 (CONTAINS)
-Reducer 132 <- Map 131 (SIMPLE_EDGE), Map 135 (SIMPLE_EDGE)
-Reducer 133 <- Map 136 (SIMPLE_EDGE), Reducer 132 (SIMPLE_EDGE), Reducer 139 (SIMPLE_EDGE)
-Reducer 134 <- Reducer 133 (SIMPLE_EDGE)
-Reducer 138 <- Map 137 (SIMPLE_EDGE), Reducer 145 (SIMPLE_EDGE)
-Reducer 139 <- Reducer 138 (SIMPLE_EDGE)
-Reducer 14 <- Map 13 (SIMPLE_EDGE), Map 15 (SIMPLE_EDGE), Union 3 (CONTAINS)
-Reducer 141 <- Map 140 (SIMPLE_EDGE), Map 146 (SIMPLE_EDGE)
-Reducer 142 <- Map 147 (SIMPLE_EDGE), Reducer 141 (SIMPLE_EDGE)
-Reducer 143 <- Reducer 142 (SIMPLE_EDGE), Union 144 (CONTAINS)
-Reducer 145 <- Union 144 (SIMPLE_EDGE)
-Reducer 149 <- Map 148 (SIMPLE_EDGE), Map 152 (SIMPLE_EDGE)
-Reducer 150 <- Map 153 (SIMPLE_EDGE), Reducer 149 (SIMPLE_EDGE)
-Reducer 151 <- Reducer 150 (SIMPLE_EDGE), Union 144 (CONTAINS)
-Reducer 155 <- Map 154 (SIMPLE_EDGE), Map 158 (SIMPLE_EDGE)
-Reducer 156 <- Map 159 (SIMPLE_EDGE), Reducer 155 (SIMPLE_EDGE)
-Reducer 157 <- Reducer 156 (SIMPLE_EDGE), Union 144 (CONTAINS)
-Reducer 17 <- Map 16 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE), Union 18 (CONTAINS)
-Reducer 19 <- Union 18 (CUSTOM_SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE), Union 3 (CONTAINS)
-Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 23 (SIMPLE_EDGE), Union 18 (CONTAINS)
-Reducer 25 <- Map 24 (SIMPLE_EDGE), Map 26 (SIMPLE_EDGE), Union 18 (CONTAINS)
-Reducer 28 <- Map 27 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
-Reducer 29 <- Map 32 (SIMPLE_EDGE), Reducer 28 (SIMPLE_EDGE), Reducer 35 (SIMPLE_EDGE)
-Reducer 30 <- Reducer 29 (SIMPLE_EDGE)
-Reducer 34 <- Map 33 (SIMPLE_EDGE), Reducer 41 (SIMPLE_EDGE)
-Reducer 35 <- Reducer 34 (SIMPLE_EDGE)
-Reducer 37 <- Map 36 (SIMPLE_EDGE), Map 42 (SIMPLE_EDGE)
-Reducer 38 <- Map 43 (SIMPLE_EDGE), Reducer 37 (SIMPLE_EDGE)
-Reducer 39 <- Reducer 38 (SIMPLE_EDGE), Union 40 (CONTAINS)
+Reducer 11 <- Union 10 (CUSTOM_SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 38 (CUSTOM_SIMPLE_EDGE), Reducer 47 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 13 <- Map 1 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE), Union 14 (CONTAINS)
+Reducer 15 <- Union 14 (CUSTOM_SIMPLE_EDGE)
+Reducer 16 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 56 (CUSTOM_SIMPLE_EDGE), Reducer 65 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 18 <- Map 17 (SIMPLE_EDGE), Map 88 (SIMPLE_EDGE), Union 19 (CONTAINS)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE), Union 3 (CONTAINS)
+Reducer 20 <- Union 19 (CUSTOM_SIMPLE_EDGE)
+Reducer 21 <- Map 17 (SIMPLE_EDGE), Map 97 (SIMPLE_EDGE)
+Reducer 22 <- Map 96 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 23 <- Reducer 22 (SIMPLE_EDGE), Union 24 (CONTAINS)
+Reducer 25 <- Union 24 (SIMPLE_EDGE)
+Reducer 26 <- Map 96 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
+Reducer 27 <- Reducer 26 (SIMPLE_EDGE)
+Reducer 28 <- Map 96 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE), Reducer 92 (SIMPLE_EDGE)
+Reducer 29 <- Reducer 28 (SIMPLE_EDGE)
+Reducer 30 <- Map 17 (SIMPLE_EDGE), Map 98 (SIMPLE_EDGE)
+Reducer 31 <- Map 96 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
+Reducer 32 <- Reducer 31 (SIMPLE_EDGE), Union 24 (CONTAINS)
+Reducer 33 <- Map 17 (SIMPLE_EDGE), Map 99 (SIMPLE_EDGE)
+Reducer 34 <- Map 96 (SIMPLE_EDGE), Reducer 33 (SIMPLE_EDGE)
+Reducer 35 <- Reducer 34 (SIMPLE_EDGE), Union 24 (CONTAINS)
+Reducer 36 <- Map 17 (SIMPLE_EDGE), Map 88 (SIMPLE_EDGE), Union 37 (CONTAINS)
+Reducer 38 <- Union 37 (CUSTOM_SIMPLE_EDGE)
+Reducer 39 <- Map 17 (SIMPLE_EDGE), Map 97 (SIMPLE_EDGE)
 Reducer 4 <- Union 3 (CUSTOM_SIMPLE_EDGE)
-Reducer 41 <- Union 40 (SIMPLE_EDGE)
-Reducer 45 <- Map 44 (SIMPLE_EDGE), Map 48 (SIMPLE_EDGE)
-Reducer 46 <- Map 49 (SIMPLE_EDGE), Reducer 45 (SIMPLE_EDGE)
-Reducer 47 <- Reducer 46 (SIMPLE_EDGE), Union 40 (CONTAINS)
-Reducer 5 <- Reducer 19 (CUSTOM_SIMPLE_EDGE), Reducer 30 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 51 <- Map 50 (SIMPLE_EDGE), Map 54 (SIMPLE_EDGE)
-Reducer 52 <- Map 55 (SIMPLE_EDGE), Reducer 51 (SIMPLE_EDGE)
-Reducer 53 <- Reducer 52 (SIMPLE_EDGE), Union 40 (CONTAINS)
-Reducer 57 <- Map 56 (SIMPLE_EDGE), Map 61 (SIMPLE_EDGE), Union 58 (CONTAINS)
-Reducer 59 <- Union 58 (CUSTOM_SIMPLE_EDGE)
-Reducer 60 <- Reducer 59 (CUSTOM_SIMPLE_EDGE), Reducer 71 (CUSTOM_SIMPLE_EDGE), Reducer 82 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 63 <- Map 62 (SIMPLE_EDGE), Map 64 (SIMPLE_EDGE), Union 58 (CONTAINS)
-Reducer 66 <- Map 65 (SIMPLE_EDGE), Map 67 (SIMPLE_EDGE), Union 58 (CONTAINS)
-Reducer 69 <- Map 68 (SIMPLE_EDGE), Map 72 (SIMPLE_EDGE), Union 70 (CONTAINS)
+Reducer 40 <- Map 96 (SIMPLE_EDGE), Reducer 39 (SIMPLE_EDGE)
+Reducer 41 <- Reducer 40 (SIMPLE_EDGE), Union 42 (CONTAINS)
+Reducer 43 <- Union 42 (SIMPLE_EDGE)
+Reducer 44 <- Map 96 (SIMPLE_EDGE), Reducer 43 (SIMPLE_EDGE)
+Reducer 45 <- Reducer 44 (SIMPLE_EDGE)
+Reducer 46 <- Map 96 (SIMPLE_EDGE), Reducer 45 (SIMPLE_EDGE), Reducer 94 (SIMPLE_EDGE)
+Reducer 47 <- Reducer 46 (SIMPLE_EDGE)
+Reducer 48 <- Map 17 (SIMPLE_EDGE), Map 98 (SIMPLE_EDGE)
+Reducer 49 <- Map 96 (SIMPLE_EDGE), Reducer 48 (SIMPLE_EDGE)
+Reducer 5 <- Reducer 20 (CUSTOM_SIMPLE_EDGE), Reducer 29 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 50 <- Reducer 49 (SIMPLE_EDGE), Union 42 (CONTAINS)
+Reducer 51 <- Map 17 (SIMPLE_EDGE), Map 99 (SIMPLE_EDGE)
+Reducer 52 <- Map 96 (SIMPLE_EDGE), Reducer 51 (SIMPLE_EDGE)
+Reducer 53 <- Reducer 52 (SIMPLE_EDGE), Union 42 (CONTAINS)
+Reducer 54 <- Map 17 (SIMPLE_EDGE), Map 88 (SIMPLE_EDGE), Union 55 (CONTAINS)
+Reducer 56 <- Union 55 (CUSTOM_SIMPLE_EDGE)
+Reducer 57 <- Map 17 (SIMPLE_EDGE), Map 97 (SIMPLE_EDGE)
+Reducer 58 <- Map 96 (SIMPLE_EDGE), Reducer 57 (SIMPLE_EDGE)
+Reducer 59 <- Reducer 58 (SIMPLE_EDGE), Union 60 (CONTAINS)
+Reducer 61 <- Union 60 (SIMPLE_EDGE)
+Reducer 62 <- Map 96 (SIMPLE_EDGE), Reducer 61 (SIMPLE_EDGE)
+Reducer 63 <- Reducer 62 (SIMPLE_EDGE)
+Reducer 64 <- Map 96 (SIMPLE_EDGE), Reducer 63 (SIMPLE_EDGE), Reducer 95 (SIMPLE_EDGE)
+Reducer 65 <- Reducer 64 (SIMPLE_EDGE)
+Reducer 66 <- Map 17 (SIMPLE_EDGE), Map 98 (SIMPLE_EDGE)
+Reducer 67 <- Map 96 (SIMPLE_EDGE), Reducer 66 (SIMPLE_EDGE)
+Reducer 68 <- Reducer 67 (SIMPLE_EDGE), Union 60 (CONTAINS)
+Reducer 69 <- Map 17 (SIMPLE_EDGE), Map 99 (SIMPLE_EDGE)
 Reducer 7 <- Union 6 (SIMPLE_EDGE)
-Reducer 71 <- Union 70 (CUSTOM_SIMPLE_EDGE)
-Reducer 74 <- Map 73 (SIMPLE_EDGE), Map 75 (SIMPLE_EDGE), Union 70 (CONTAINS)
-Reducer 77 <- Map 76 (SIMPLE_EDGE), Map 78 (SIMPLE_EDGE), Union 70 (CONTAINS)
+Reducer 70 <- Map 96 (SIMPLE_EDGE), Reducer 69 (SIMPLE_EDGE)
+Reducer 71 <- Reducer 70 (SIMPLE_EDGE), Union 60 (CONTAINS)
+Reducer 73 <- Map 72 (SIMPLE_EDGE), Map 76 (SIMPLE_EDGE), Union 3 (CONTAINS)
+Reducer 74 <- Map 72 (SIMPLE_EDGE), Map 76 (SIMPLE_EDGE), Union 10 (CONTAINS)
+Reducer 75 <- Map 72 (SIMPLE_EDGE), Map 76 (SIMPLE_EDGE), Union 14 (CONTAINS)
+Reducer 77 <- Map 76 (SIMPLE_EDGE), Map 89 (SIMPLE_EDGE), Union 19 (CONTAINS)
+Reducer 78 <- Map 76 (SIMPLE_EDGE), Map 89 (SIMPLE_EDGE), Union 37 (CONTAINS)
+Reducer 79 <- Map 76 (SIMPLE_EDGE), Map 89 (SIMPLE_EDGE), Union 55 (CONTAINS)
 Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
-Reducer 80 <- Map 79 (SIMPLE_EDGE), Map 83 (SIMPLE_EDGE)
-Reducer 81 <- Map 84 (SIMPLE_EDGE), Reducer 80 (SIMPLE_EDGE), Reducer 87 (SIMPLE_EDGE)
-Reducer 82 <- Reducer 81 (SIMPLE_EDGE)
-Reducer 86 <- Map 85 (SIMPLE_EDGE), Reducer 93 (SIMPLE_EDGE)
-Reducer 87 <- Reducer 86 (SIMPLE_EDGE)
-Reducer 89 <- Map 88 (SIMPLE_EDGE), Map 94 (SIMPLE_EDGE)
-Reducer 90 <- Map 95 (SIMPLE_EDGE), Reducer 89 (SIMPLE_EDGE)
-Reducer 91 <- Reducer 90 (SIMPLE_EDGE), Union 92 (CONTAINS)
-Reducer 93 <- Union 92 (SIMPLE_EDGE)
-Reducer 97 <- Map 100 (SIMPLE_EDGE), Map 96 (SIMPLE_EDGE)
-Reducer 98 <- Map 101 (SIMPLE_EDGE), Reducer 97 (SIMPLE_EDGE)
-Reducer 99 <- Reducer 98 (SIMPLE_EDGE), Union 92 (CONTAINS)
+Reducer 81 <- Map 80 (SIMPLE_EDGE), Map 84 (SIMPLE_EDGE), Union 3 (CONTAINS)
+Reducer 82 <- Map 80 (SIMPLE_EDGE), Map 84 (SIMPLE_EDGE), Union 10 (CONTAINS)
+Reducer 83 <- Map 80 (SIMPLE_EDGE), Map 84 (SIMPLE_EDGE), Union 14 (CONTAINS)
+Reducer 85 <- Map 84 (SIMPLE_EDGE), Map 90 (SIMPLE_EDGE), Union 19 (CONTAINS)
+Reducer 86 <- Map 84 (SIMPLE_EDGE), Map 90 (SIMPLE_EDGE), Union 37 (CONTAINS)
+Reducer 87 <- Map 84 (SIMPLE_EDGE), Map 90 (SIMPLE_EDGE), Union 55 (CONTAINS)
+Reducer 9 <- Map 1 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE), Union 10 (CONTAINS)
+Reducer 92 <- Map 91 (SIMPLE_EDGE), Map 93 (SIMPLE_EDGE)
+Reducer 94 <- Map 100 (SIMPLE_EDGE), Map 93 (SIMPLE_EDGE)
+Reducer 95 <- Map 101 (SIMPLE_EDGE), Map 93 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -302,1249 +302,1191 @@ Stage-0
                 Group By Operator [GBY_585] (rows=1016388080 width=405)
                   Output:["_col0","_col1","_col2","_col3","_col5","_col6"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4
                 <-Union 6 [SIMPLE_EDGE]
-                  <-Reducer 112 [CONTAINS]
+                  <-Reducer 12 [CONTAINS]
                     Reduce Output Operator [RS_584]
                       PartitionCols:_col0, _col1, _col2, _col3, _col4
                       Group By Operator [GBY_583] (rows=2032776160 width=405)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(_col4)","sum(_col5)"],keys:_col0, _col1, _col2, _col3, 0
-                        Select Operator [SEL_580] (rows=58081078 width=432)
+                        Select Operator [SEL_385] (rows=116155905 width=432)
                           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                          Filter Operator [FIL_579] (rows=58081078 width=432)
+                          Filter Operator [FIL_384] (rows=116155905 width=432)
                             predicate:(_col5 > _col1)
-                            Merge Join Operator [MERGEJOIN_908] (rows=174243235 width=432)
+                            Merge Join Operator [MERGEJOIN_907] (rows=348467716 width=432)
                               Conds:(Inner),(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6"]
-                            <-Reducer 111 [CUSTOM_SIMPLE_EDGE]
-                              PARTITION_ONLY_SHUFFLE [RS_575]
-                                Select Operator [SEL_432] (rows=1 width=8)
-                                  Filter Operator [FIL_431] (rows=1 width=8)
+                            <-Reducer 11 [CUSTOM_SIMPLE_EDGE]
+                              PARTITION_ONLY_SHUFFLE [RS_380]
+                                Select Operator [SEL_237] (rows=1 width=8)
+                                  Filter Operator [FIL_236] (rows=1 width=8)
                                     predicate:(sq_count_check(_col0) <= 1)
-                                    Group By Operator [GBY_429] (rows=1 width=8)
+                                    Group By Operator [GBY_234] (rows=1 width=8)
                                       Output:["_col0"],aggregations:["count()"]
-                                      Select Operator [SEL_424] (rows=1 width=8)
-                                        Group By Operator [GBY_423] (rows=1 width=8)
+                                      Select Operator [SEL_229] (rows=1 width=8)
+                                        Group By Operator [GBY_228] (rows=1 width=8)
                                           Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                        <-Union 110 [CUSTOM_SIMPLE_EDGE]
-                                          <-Reducer 109 [CONTAINS]
-                                            Reduce Output Operator [RS_422]
-                                              Group By Operator [GBY_421] (rows=1 width=8)
+                                        <-Union 10 [CUSTOM_SIMPLE_EDGE]
+                                          <-Reducer 74 [CONTAINS]
+                                            Reduce Output Operator [RS_227]
+                                              Group By Operator [GBY_226] (rows=1 width=8)
                                                 Output:["_col0"],aggregations:["count(_col0)"]
-                                                Select Operator [SEL_420] (rows=1108786976 width=108)
+                                                Select Operator [SEL_225] (rows=1108786976 width=108)
                                                   Output:["_col0"]
-                                                  Select Operator [SEL_397] (rows=633595212 width=88)
+                                                  Select Operator [SEL_212] (rows=316788826 width=135)
                                                     Output:["_col0"]
-                                                    Merge Join Operator [MERGEJOIN_891] (rows=633595212 width=88)
-                                                      Conds:RS_394._col0=RS_395._col0(Inner),Output:["_col1"]
-                                                    <-Map 108 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_394]
-                                                        PartitionCols:_col0
-                                                        Select Operator [SEL_390] (rows=575995635 width=88)
-                                                          Output:["_col0","_col1"]
-                                                          Filter Operator [FIL_836] (rows=575995635 width=88)
-                                                            predicate:ss_sold_date_sk is not null
-                                                            TableScan [TS_388] (rows=575995635 width=88)
-                                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_quantity"]
-                                                    <-Map 113 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_395]
+                                                    Merge Join Operator [MERGEJOIN_877] (rows=316788826 width=135)
+                                                      Conds:RS_209._col0=RS_210._col0(Inner),Output:["_col1"]
+                                                    <-Map 76 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_210]
                                                         PartitionCols:_col0
-                                                        Select Operator [SEL_393] (rows=8116 width=1119)
+                                                        Select Operator [SEL_208] (rows=8116 width=1119)
                                                           Output:["_col0"]
-                                                          Filter Operator [FIL_837] (rows=8116 width=1119)
-                                                            predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                            TableScan [TS_391] (rows=73049 width=1119)
+                                                          Filter Operator [FIL_814] (rows=8116 width=1119)
+                                                            predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
+                                                            TableScan [TS_13] (rows=73049 width=1119)
                                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                          <-Reducer 115 [CONTAINS]
-                                            Reduce Output Operator [RS_422]
-                                              Group By Operator [GBY_421] (rows=1 width=8)
-                                                Output:["_col0"],aggregations:["count(_col0)"]
-                                                Select Operator [SEL_420] (rows=1108786976 width=108)
-                                                  Output:["_col0"]
-                                                  Select Operator [SEL_407] (rows=316788826 width=135)
-                                                    Output:["_col0"]
-                                                    Merge Join Operator [MERGEJOIN_892] (rows=316788826 width=135)
-                                                      Conds:RS_404._col0=RS_405._col0(Inner),Output:["_col1"]
-                                                    <-Map 114 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_404]
+                                                    <-Map 72 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_209]
                                                         PartitionCols:_col0
-                                                        Select Operator [SEL_400] (rows=287989836 width=135)
+                                                        Select Operator [SEL_205] (rows=287989836 width=135)
                                                           Output:["_col0","_col1"]
-                                                          Filter Operator [FIL_838] (rows=287989836 width=135)
+                                                          Filter Operator [FIL_813] (rows=287989836 width=135)
                                                             predicate:cs_sold_date_sk is not null
-                                                            TableScan [TS_398] (rows=287989836 width=135)
+                                                            TableScan [TS_10] (rows=287989836 width=135)
                                                               default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_quantity"]
-                                                    <-Map 116 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_405]
+                                          <-Reducer 82 [CONTAINS]
+                                            Reduce Output Operator [RS_227]
+                                              Group By Operator [GBY_226] (rows=1 width=8)
+                                                Output:["_col0"],aggregations:["count(_col0)"]
+                                                Select Operator [SEL_225] (rows=1108786976 width=108)
+                                                  Output:["_col0"]
+                                                  Select Operator [SEL_224] (rows=158402938 width=135)
+                                                    Output:["_col0"]
+                                                    Merge Join Operator [MERGEJOIN_878] (rows=158402938 width=135)
+                                                      Conds:RS_221._col0=RS_222._col0(Inner),Output:["_col1"]
+                                                    <-Map 84 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_222]
                                                         PartitionCols:_col0
-                                                        Select Operator [SEL_403] (rows=8116 width=1119)
+                                                        Select Operator [SEL_220] (rows=8116 width=1119)
                                                           Output:["_col0"]
-                                                          Filter Operator [FIL_839] (rows=8116 width=1119)
+                                                          Filter Operator [FIL_816] (rows=8116 width=1119)
                                                             predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
-                                                            TableScan [TS_401] (rows=73049 width=1119)
+                                                            TableScan [TS_25] (rows=73049 width=1119)
                                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                          <-Reducer 118 [CONTAINS]
-                                            Reduce Output Operator [RS_422]
-                                              Group By Operator [GBY_421] (rows=1 width=8)
-                                                Output:["_col0"],aggregations:["count(_col0)"]
-                                                Select Operator [SEL_420] (rows=1108786976 width=108)
-                                                  Output:["_col0"]
-                                                  Select Operator [SEL_419] (rows=158402938 width=135)
-                                                    Output:["_col0"]
-                                                    Merge Join Operator [MERGEJOIN_893] (rows=158402938 width=135)
-                                                      Conds:RS_416._col0=RS_417._col0(Inner),Output:["_col1"]
-                                                    <-Map 117 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_416]
+                                                    <-Map 80 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_221]
                                                         PartitionCols:_col0
-                                                        Select Operator [SEL_412] (rows=144002668 width=135)
+                                                        Select Operator [SEL_217] (rows=144002668 width=135)
                                                           Output:["_col0","_col1"]
-                                                          Filter Operator [FIL_840] (rows=144002668 width=135)
+                                                          Filter Operator [FIL_815] (rows=144002668 width=135)
                                                             predicate:ws_sold_date_sk is not null
-                                                            TableScan [TS_410] (rows=144002668 width=135)
+                                                            TableScan [TS_22] (rows=144002668 width=135)
                                                               default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_quantity"]
-                                                    <-Map 119 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_417]
+                                          <-Reducer 9 [CONTAINS]
+                                            Reduce Output Operator [RS_227]
+                                              Group By Operator [GBY_226] (rows=1 width=8)
+                                                Output:["_col0"],aggregations:["count(_col0)"]
+                                                Select Operator [SEL_225] (rows=1108786976 width=108)
+                                                  Output:["_col0"]
+                                                  Select Operator [SEL_202] (rows=633595212 width=88)
+                                                    Output:["_col0"]
+                                                    Merge Join Operator [MERGEJOIN_876] (rows=633595212 width=88)
+                                                      Conds:RS_199._col0=RS_200._col0(Inner),Output:["_col1"]
+                                                    <-Map 17 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_200]
                                                         PartitionCols:_col0
-                                                        Select Operator [SEL_415] (rows=8116 width=1119)
+                                                        Select Operator [SEL_198] (rows=8116 width=1119)
                                                           Output:["_col0"]
-                                                          Filter Operator [FIL_841] (rows=8116 width=1119)
-                                                            predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
-                                                            TableScan [TS_413] (rows=73049 width=1119)
+                                                          Filter Operator [FIL_812] (rows=8116 width=1119)
+                                                            predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
+                                                            TableScan [TS_3] (rows=73049 width=1119)
                                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 123 [CUSTOM_SIMPLE_EDGE]
-                              PARTITION_ONLY_SHUFFLE [RS_576]
-                                Group By Operator [GBY_469] (rows=1 width=288)
+                                                    <-Map 1 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_199]
+                                                        PartitionCols:_col0
+                                                        Select Operator [SEL_195] (rows=575995635 width=88)
+                                                          Output:["_col0","_col1"]
+                                                          Filter Operator [FIL_811] (rows=575995635 width=88)
+                                                            predicate:ss_sold_date_sk is not null
+                                                            TableScan [TS_0] (rows=575995635 width=88)
+                                                              default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_quantity"]
+                            <-Reducer 38 [CUSTOM_SIMPLE_EDGE]
+                              PARTITION_ONLY_SHUFFLE [RS_381]
+                                Group By Operator [GBY_274] (rows=1 width=288)
                                   Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                <-Union 122 [CUSTOM_SIMPLE_EDGE]
-                                  <-Reducer 121 [CONTAINS]
-                                    Reduce Output Operator [RS_468]
-                                      Group By Operator [GBY_467] (rows=1 width=288)
+                                <-Union 37 [CUSTOM_SIMPLE_EDGE]
+                                  <-Reducer 36 [CONTAINS]
+                                    Reduce Output Operator [RS_273]
+                                      Group By Operator [GBY_272] (rows=1 width=288)
                                         Output:["_col0"],aggregations:["avg(_col0)"]
-                                        Select Operator [SEL_465] (rows=1108786976 width=108)
+                                        Select Operator [SEL_270] (rows=1108786976 width=108)
                                           Output:["_col0"]
-                                          Select Operator [SEL_442] (rows=633595212 width=88)
+                                          Select Operator [SEL_247] (rows=633595212 width=88)
                                             Output:["_col0","_col1"]
-                                            Merge Join Operator [MERGEJOIN_894] (rows=633595212 width=88)
-                                              Conds:RS_439._col0=RS_440._col0(Inner),Output:["_col1","_col2"]
-                                            <-Map 120 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_439]
+                                            Merge Join Operator [MERGEJOIN_879] (rows=633595212 width=88)
+                                              Conds:RS_244._col0=RS_245._col0(Inner),Output:["_col1","_col2"]
+                                            <-Map 17 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_245]
                                                 PartitionCols:_col0
-                                                Select Operator [SEL_435] (rows=575995635 width=88)
+                                                Select Operator [SEL_243] (rows=8116 width=1119)
+                                                  Output:["_col0"]
+                                                  Filter Operator [FIL_818] (rows=8116 width=1119)
+                                                    predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
+                                                     Please refer to the previous TableScan [TS_3]
+                                            <-Map 88 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_244]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_240] (rows=575995635 width=88)
                                                   Output:["_col0","_col1","_col2"]
-                                                  Filter Operator [FIL_842] (rows=575995635 width=88)
+                                                  Filter Operator [FIL_817] (rows=575995635 width=88)
                                                     predicate:ss_sold_date_sk is not null
-                                                    TableScan [TS_433] (rows=575995635 width=88)
+                                                    TableScan [TS_45] (rows=575995635 width=88)
                                                       default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_quantity","ss_list_price"]
-                                            <-Map 124 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_440]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_438] (rows=8116 width=1119)
-                                                  Output:["_col0"]
-                                                  Filter Operator [FIL_843] (rows=8116 width=1119)
-                                                    predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                    TableScan [TS_436] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                  <-Reducer 126 [CONTAINS]
-                                    Reduce Output Operator [RS_468]
-                                      Group By Operator [GBY_467] (rows=1 width=288)
+                                  <-Reducer 78 [CONTAINS]
+                                    Reduce Output Operator [RS_273]
+                                      Group By Operator [GBY_272] (rows=1 width=288)
                                         Output:["_col0"],aggregations:["avg(_col0)"]
-                                        Select Operator [SEL_465] (rows=1108786976 width=108)
+                                        Select Operator [SEL_270] (rows=1108786976 width=108)
                                           Output:["_col0"]
-                                          Select Operator [SEL_452] (rows=316788826 width=135)
+                                          Select Operator [SEL_257] (rows=316788826 width=135)
                                             Output:["_col0","_col1"]
-                                            Merge Join Operator [MERGEJOIN_895] (rows=316788826 width=135)
-                                              Conds:RS_449._col0=RS_450._col0(Inner),Output:["_col1","_col2"]
-                                            <-Map 125 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_449]
+                                            Merge Join Operator [MERGEJOIN_880] (rows=316788826 width=135)
+                                              Conds:RS_254._col0=RS_255._col0(Inner),Output:["_col1","_col2"]
+                                            <-Map 76 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_255]
                                                 PartitionCols:_col0
-                                                Select Operator [SEL_445] (rows=287989836 width=135)
+                                                Select Operator [SEL_253] (rows=8116 width=1119)
+                                                  Output:["_col0"]
+                                                  Filter Operator [FIL_820] (rows=8116 width=1119)
+                                                    predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
+                                                     Please refer to the previous TableScan [TS_13]
+                                            <-Map 89 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_254]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_250] (rows=287989836 width=135)
                                                   Output:["_col0","_col1","_col2"]
-                                                  Filter Operator [FIL_844] (rows=287989836 width=135)
+                                                  Filter Operator [FIL_819] (rows=287989836 width=135)
                                                     predicate:cs_sold_date_sk is not null
-                                                    TableScan [TS_443] (rows=287989836 width=135)
+                                                    TableScan [TS_55] (rows=287989836 width=135)
                                                       default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_quantity","cs_list_price"]
-                                            <-Map 127 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_450]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_448] (rows=8116 width=1119)
-                                                  Output:["_col0"]
-                                                  Filter Operator [FIL_845] (rows=8116 width=1119)
-                                                    predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
-                                                    TableScan [TS_446] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                  <-Reducer 129 [CONTAINS]
-                                    Reduce Output Operator [RS_468]
-                                      Group By Operator [GBY_467] (rows=1 width=288)
+                                  <-Reducer 86 [CONTAINS]
+                                    Reduce Output Operator [RS_273]
+                                      Group By Operator [GBY_272] (rows=1 width=288)
                                         Output:["_col0"],aggregations:["avg(_col0)"]
-                                        Select Operator [SEL_465] (rows=1108786976 width=108)
+                                        Select Operator [SEL_270] (rows=1108786976 width=108)
                                           Output:["_col0"]
-                                          Select Operator [SEL_464] (rows=158402938 width=135)
+                                          Select Operator [SEL_269] (rows=158402938 width=135)
                                             Output:["_col0","_col1"]
-                                            Merge Join Operator [MERGEJOIN_896] (rows=158402938 width=135)
-                                              Conds:RS_461._col0=RS_462._col0(Inner),Output:["_col1","_col2"]
-                                            <-Map 128 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_461]
+                                            Merge Join Operator [MERGEJOIN_881] (rows=158402938 width=135)
+                                              Conds:RS_266._col0=RS_267._col0(Inner),Output:["_col1","_col2"]
+                                            <-Map 84 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_267]
                                                 PartitionCols:_col0
-                                                Select Operator [SEL_457] (rows=144002668 width=135)
+                                                Select Operator [SEL_265] (rows=8116 width=1119)
+                                                  Output:["_col0"]
+                                                  Filter Operator [FIL_822] (rows=8116 width=1119)
+                                                    predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
+                                                     Please refer to the previous TableScan [TS_25]
+                                            <-Map 90 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_266]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_262] (rows=144002668 width=135)
                                                   Output:["_col0","_col1","_col2"]
-                                                  Filter Operator [FIL_846] (rows=144002668 width=135)
+                                                  Filter Operator [FIL_821] (rows=144002668 width=135)
                                                     predicate:ws_sold_date_sk is not null
-                                                    TableScan [TS_455] (rows=144002668 width=135)
+                                                    TableScan [TS_67] (rows=144002668 width=135)
                                                       default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_quantity","ws_list_price"]
-                                            <-Map 130 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_462]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_460] (rows=8116 width=1119)
-                                                  Output:["_col0"]
-                                                  Filter Operator [FIL_847] (rows=8116 width=1119)
-                                                    predicate:(d_year BETWEEN 1998 AND 2000 and d_date_sk is not null)
-                                                    TableScan [TS_458] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                            <-Reducer 134 [CUSTOM_SIMPLE_EDGE]
-                              PARTITION_ONLY_SHUFFLE [RS_577]
-                                Group By Operator [GBY_573] (rows=174243235 width=135)
+                            <-Reducer 47 [CUSTOM_SIMPLE_EDGE]
+                              PARTITION_ONLY_SHUFFLE [RS_382]
+                                Group By Operator [GBY_378] (rows=348467716 width=135)
                                   Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(VALUE._col0)","count(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                <-Reducer 133 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_572]
+                                <-Reducer 46 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_377]
                                     PartitionCols:_col0, _col1, _col2
-                                    Group By Operator [GBY_571] (rows=348486471 width=135)
+                                    Group By Operator [GBY_376] (rows=696935432 width=135)
                                       Output:["_col0","_col1","_col2","_col3","_col4"],aggregations:["sum(_col3)","count()"],keys:_col0, _col1, _col2
-                                      Select Operator [SEL_569] (rows=348486471 width=135)
+                                      Select Operator [SEL_374] (rows=696935432 width=135)
                                         Output:["_col0","_col1","_col2","_col3"]
-                                        Merge Join Operator [MERGEJOIN_905] (rows=348486471 width=135)
-                                          Conds:RS_565._col1=RS_566._col0(Inner),RS_565._col1=RS_567._col0(Inner),Output:["_col2","_col3","_col8","_col9","_col10"]
-                                        <-Map 136 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_566]
+                                        Merge Join Operator [MERGEJOIN_890] (rows=696935432 width=135)
+                                          Conds:RS_370._col1=RS_371._col0(Inner),RS_370._col1=RS_372._col0(Inner),Output:["_col2","_col3","_col8","_col9","_col10"]
+                                        <-Map 96 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_371]
                                             PartitionCols:_col0
-                                            Select Operator [SEL_479] (rows=462000 width=1436)
+                                            Select Operator [SEL_284] (rows=462000 width=1436)
                                               Output:["_col0","_col1","_col2","_col3"]
-                                              Filter Operator [FIL_850] (rows=462000 width=1436)
+                                              Filter Operator [FIL_825] (rows=462000 width=1436)
                                                 predicate:i_item_sk is not null
-                                                TableScan [TS_477] (rows=462000 width=1436)
+                                                TableScan [TS_89] (rows=462000 width=1436)
                                                   default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id"]
-                                        <-Reducer 132 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_565]
-                                            PartitionCols:_col1
-                                            Merge Join Operator [MERGEJOIN_897] (rows=158402938 width=135)
-                                              Conds:RS_562._col0=RS_563._col0(Inner),Output:["_col1","_col2","_col3"]
-                                            <-Map 131 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_562]
+                                        <-Reducer 45 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_372]
+                                            PartitionCols:_col0
+                                            Group By Operator [GBY_365] (rows=254100 width=1436)
+                                              Output:["_col0"],keys:KEY._col0
+                                            <-Reducer 44 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_364]
                                                 PartitionCols:_col0
-                                                Select Operator [SEL_473] (rows=144002668 width=135)
-                                                  Output:["_col0","_col1","_col2","_col3"]
-                                                  Filter Operator [FIL_848] (rows=144002668 width=135)
-                                                    predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
-                                                    TableScan [TS_471] (rows=144002668 width=135)
-                                                      default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_quantity","ws_list_price"]
-                                            <-Map 135 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_563]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_476] (rows=18262 width=1119)
-                                                  Output:["_col0"]
-                                                  Filter Operator [FIL_849] (rows=18262 width=1119)
-                                                    predicate:((d_year = 2000) and (d_moy = 11) and d_date_sk is not null)
-                                                    TableScan [TS_474] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                                        <-Reducer 139 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_567]
-                                            PartitionCols:_col0
-                                            Group By Operator [GBY_560] (rows=254100 width=1436)
-                                              Output:["_col0"],keys:KEY._col0
-                                            <-Reducer 138 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_559]
-                                                PartitionCols:_col0
-                                                Group By Operator [GBY_558] (rows=508200 width=1436)
+                                                Group By Operator [GBY_363] (rows=508200 width=1436)
                                                   Output:["_col0"],keys:_col0
-                                                  Merge Join Operator [MERGEJOIN_904] (rows=508200 width=1436)
-                                                    Conds:RS_554._col1, _col2, _col3=RS_555._col0, _col1, _col2(Inner),Output:["_col0"]
-                                                  <-Map 137 [SIMPLE_EDGE]
-                                                    SHUFFLE [RS_554]
+                                                  Merge Join Operator [MERGEJOIN_889] (rows=508200 width=1436)
+                                                    Conds:RS_359._col1, _col2, _col3=RS_360._col0, _col1, _col2(Inner),Output:["_col0"]
+                                                  <-Map 96 [SIMPLE_EDGE]
+                                                    SHUFFLE [RS_359]
                                                       PartitionCols:_col1, _col2, _col3
-                                                      Select Operator [SEL_482] (rows=462000 width=1436)
+                                                      Select Operator [SEL_287] (rows=462000 width=1436)
                                                         Output:["_col0","_col1","_col2","_col3"]
-                                                        Filter Operator [FIL_851] (rows=462000 width=1436)
+                                                        Filter Operator [FIL_826] (rows=462000 width=1436)
                                                           predicate:(i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_item_sk is not null)
-                                                          TableScan [TS_480] (rows=462000 width=1436)
-                                                            default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id"]
-                                                  <-Reducer 145 [SIMPLE_EDGE]
-                                                    SHUFFLE [RS_555]
+                                                           Please refer to the previous TableScan [TS_89]
+                                                  <-Reducer 43 [SIMPLE_EDGE]
+                                                    SHUFFLE [RS_360]
                                                       PartitionCols:_col0, _col1, _col2
-                                                      Select Operator [SEL_553] (rows=1 width=108)
+                                                      Select Operator [SEL_358] (rows=1 width=108)
                                                         Output:["_col0","_col1","_col2"]
-                                                        Filter Operator [FIL_552] (rows=1 width=108)
+                                                        Filter Operator [FIL_357] (rows=1 width=108)
                                                           predicate:(_col3 = 3)
-                                                          Group By Operator [GBY_551] (rows=304916424 width=108)
+                                                          Group By Operator [GBY_356] (rows=304916424 width=108)
                                                             Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                                          <-Union 144 [SIMPLE_EDGE]
-                                                            <-Reducer 143 [CONTAINS]
-                                                              Reduce Output Operator [RS_550]
+                                                          <-Union 42 [SIMPLE_EDGE]
+                                                            <-Reducer 41 [CONTAINS]
+                                                              Reduce Output Operator [RS_355]
                                                                 PartitionCols:_col0, _col1, _col2
-                                                                Group By Operator [GBY_549] (rows=609832849 width=108)
+                                                                Group By Operator [GBY_354] (rows=609832849 width=108)
                                                                   Output:["_col0","_col1","_col2","_col3"],aggregations:["count(_col3)"],keys:_col0, _col1, _col2
-                                                                  Group By Operator [GBY_502] (rows=348477374 width=88)
+                                                                  Group By Operator [GBY_307] (rows=348477374 width=88)
                                                                     Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                                                  <-Reducer 142 [SIMPLE_EDGE]
-                                                                    SHUFFLE [RS_501]
+                                                                  <-Reducer 40 [SIMPLE_EDGE]
+                                                                    SHUFFLE [RS_306]
                                                                       PartitionCols:_col0, _col1, _col2
-                                                                      Group By Operator [GBY_500] (rows=696954748 width=88)
+                                                                      Group By Operator [GBY_305] (rows=696954748 width=88)
                                                                         Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
-                                                                        Select Operator [SEL_498] (rows=696954748 width=88)
+                                                                        Select Operator [SEL_303] (rows=696954748 width=88)
                                                                           Output:["_col0","_col1","_col2"]
-                                                                          Merge Join Operator [MERGEJOIN_899] (rows=696954748 width=88)
-                                                                            Conds:RS_495._col1=RS_496._col0(Inner),Output:["_col5","_col6","_col7"]
-                                                                          <-Map 147 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_496]
+                                                                          Merge Join Operator [MERGEJOIN_884] (rows=696954748 width=88)
+                                                                            Conds:RS_300._col1=RS_301._col0(Inner),Output:["_col5","_col6","_col7"]
+                                                                          <-Map 96 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_301]
                                                                               PartitionCols:_col0
-                                                                              Select Operator [SEL_491] (rows=462000 width=1436)
+                                                                              Select Operator [SEL_296] (rows=462000 width=1436)
                                                                                 Output:["_col0","_col1","_col2","_col3"]
-                                                                                Filter Operator [FIL_854] (rows=462000 width=1436)
+                                                                                Filter Operator [FIL_829] (rows=462000 width=1436)
                                                                                   predicate:(i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null)
-                                                                                  TableScan [TS_489] (rows=462000 width=1436)
-                                                                                    default@item,iss,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id"]
-                                                                          <-Reducer 141 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_495]
+                                                                                   Please refer to the previous TableScan [TS_89]
+                                                                          <-Reducer 39 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_300]
                                                                               PartitionCols:_col1
-                                                                              Merge Join Operator [MERGEJOIN_898] (rows=633595212 width=88)
-                                                                                Conds:RS_492._col0=RS_493._col0(Inner),Output:["_col1"]
-                                                                              <-Map 140 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_492]
+                                                                              Merge Join Operator [MERGEJOIN_883] (rows=633595212 width=88)
+                                                                                Conds:RS_297._col0=RS_298._col0(Inner),Output:["_col1"]
+                                                                              <-Map 17 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_298]
                                                                                   PartitionCols:_col0
-                                                                                  Select Operator [SEL_485] (rows=575995635 width=88)
+                                                                                  Select Operator [SEL_293] (rows=8116 width=1119)
+                                                                                    Output:["_col0"]
+                                                                                    Filter Operator [FIL_828] (rows=8116 width=1119)
+                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
+                                                                                       Please refer to the previous TableScan [TS_3]
+                                                                              <-Map 97 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_297]
+                                                                                  PartitionCols:_col0
+                                                                                  Select Operator [SEL_290] (rows=575995635 width=88)
                                                                                     Output:["_col0","_col1"]
-                                                                                    Filter Operator [FIL_852] (rows=575995635 width=88)
+                                                                                    Filter Operator [FIL_827] (rows=575995635 width=88)
                                                                                       predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
-                                                                                      TableScan [TS_483] (rows=575995635 width=88)
+                                                                                      TableScan [TS_95] (rows=575995635 width=88)
                                                                                         default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk"]
-                                                                              <-Map 146 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_493]
-                                                                                  PartitionCols:_col0
-                                                                                  Select Operator [SEL_488] (rows=8116 width=1119)
-                                                                                    Output:["_col0"]
-                                                                                    Filter Operator [FIL_853] (rows=8116 width=1119)
-                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                                                      TableScan [TS_486] (rows=73049 width=1119)
-                                                                                        default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                            <-Reducer 151 [CONTAINS]
-                                                              Reduce Output Operator [RS_550]
+                                                            <-Reducer 50 [CONTAINS]
+                                                              Reduce Output Operator [RS_355]
                                                                 PartitionCols:_col0, _col1, _col2
-                                                                Group By Operator [GBY_549] (rows=609832849 width=108)
+                                                                Group By Operator [GBY_354] (rows=609832849 width=108)
                                                                   Output:["_col0","_col1","_col2","_col3"],aggregations:["count(_col3)"],keys:_col0, _col1, _col2
-                                                                  Group By Operator [GBY_523] (rows=174233858 width=135)
+                                                                  Group By Operator [GBY_328] (rows=174233858 width=135)
                                                                     Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                                                  <-Reducer 150 [SIMPLE_EDGE]
-                                                                    SHUFFLE [RS_522]
+                                                                  <-Reducer 49 [SIMPLE_EDGE]
+                                                                    SHUFFLE [RS_327]
                                                                       PartitionCols:_col0, _col1, _col2
-                                                                      Group By Operator [GBY_521] (rows=348467716 width=135)
+                                                                      Group By Operator [GBY_326] (rows=348467716 width=135)
                                                                         Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
-                                                                        Select Operator [SEL_519] (rows=348467716 width=135)
+                                                                        Select Operator [SEL_324] (rows=348467716 width=135)
                                                                           Output:["_col0","_col1","_col2"]
-                                                                          Merge Join Operator [MERGEJOIN_901] (rows=348467716 width=135)
-                                                                            Conds:RS_516._col1=RS_517._col0(Inner),Output:["_col5","_col6","_col7"]
-                                                                          <-Map 153 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_517]
+                                                                          Merge Join Operator [MERGEJOIN_886] (rows=348467716 width=135)
+                                                                            Conds:RS_321._col1=RS_322._col0(Inner),Output:["_col5","_col6","_col7"]
+                                                                          <-Map 96 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_322]
                                                                               PartitionCols:_col0
-                                                                              Select Operator [SEL_512] (rows=462000 width=1436)
+                                                                              Select Operator [SEL_317] (rows=462000 width=1436)
                                                                                 Output:["_col0","_col1","_col2","_col3"]
-                                                                                Filter Operator [FIL_857] (rows=462000 width=1436)
+                                                                                Filter Operator [FIL_832] (rows=462000 width=1436)
                                                                                   predicate:(i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null)
-                                                                                  TableScan [TS_510] (rows=462000 width=1436)
-                                                                                    default@item,ics,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id"]
-                                                                          <-Reducer 149 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_516]
+                                                                                   Please refer to the previous TableScan [TS_89]
+                                                                          <-Reducer 48 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_321]
                                                                               PartitionCols:_col1
-                                                                              Merge Join Operator [MERGEJOIN_900] (rows=316788826 width=135)
-                                                                                Conds:RS_513._col0=RS_514._col0(Inner),Output:["_col1"]
-                                                                              <-Map 148 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_513]
+                                                                              Merge Join Operator [MERGEJOIN_885] (rows=316788826 width=135)
+                                                                                Conds:RS_318._col0=RS_319._col0(Inner),Output:["_col1"]
+                                                                              <-Map 17 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_319]
                                                                                   PartitionCols:_col0
-                                                                                  Select Operator [SEL_506] (rows=287989836 width=135)
+                                                                                  Select Operator [SEL_314] (rows=8116 width=1119)
+                                                                                    Output:["_col0"]
+                                                                                    Filter Operator [FIL_831] (rows=8116 width=1119)
+                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
+                                                                                       Please refer to the previous TableScan [TS_3]
+                                                                              <-Map 98 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_318]
+                                                                                  PartitionCols:_col0
+                                                                                  Select Operator [SEL_311] (rows=287989836 width=135)
                                                                                     Output:["_col0","_col1"]
-                                                                                    Filter Operator [FIL_855] (rows=287989836 width=135)
+                                                                                    Filter Operator [FIL_830] (rows=287989836 width=135)
                                                                                       predicate:(cs_item_sk is not null and cs_sold_date_sk is not null)
-                                                                                      TableScan [TS_504] (rows=287989836 width=135)
+                                                                                      TableScan [TS_116] (rows=287989836 width=135)
                                                                                         default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk"]
-                                                                              <-Map 152 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_514]
-                                                                                  PartitionCols:_col0
-                                                                                  Select Operator [SEL_509] (rows=8116 width=1119)
-                                                                                    Output:["_col0"]
-                                                                                    Filter Operator [FIL_856] (rows=8116 width=1119)
-                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                                                      TableScan [TS_507] (rows=73049 width=1119)
-                                                                                        default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                            <-Reducer 157 [CONTAINS]
-                                                              Reduce Output Operator [RS_550]
+                                                            <-Reducer 53 [CONTAINS]
+                                                              Reduce Output Operator [RS_355]
                                                                 PartitionCols:_col0, _col1, _col2
-                                                                Group By Operator [GBY_549] (rows=609832849 width=108)
+                                                                Group By Operator [GBY_354] (rows=609832849 width=108)
                                                                   Output:["_col0","_col1","_col2","_col3"],aggregations:["count(_col3)"],keys:_col0, _col1, _col2
-                                                                  Group By Operator [GBY_545] (rows=87121617 width=135)
+                                                                  Group By Operator [GBY_350] (rows=87121617 width=135)
                                                                     Output:["_col0","_col1","_col2","_col3"],aggregations:["count(VALUE._col0)"],keys:KEY._col0, KEY._col1, KEY._col2
-                                                                  <-Reducer 156 [SIMPLE_EDGE]
-                                                                    SHUFFLE [RS_544]
+                                                                  <-Reducer 52 [SIMPLE_EDGE]
+                                                                    SHUFFLE [RS_349]
                                                                       PartitionCols:_col0, _col1, _col2
-                                                                      Group By Operator [GBY_543] (rows=174243235 width=135)
+                                                                      Group By Operator [GBY_348] (rows=174243235 width=135)
                                                                         Output:["_col0","_col1","_col2","_col3"],aggregations:["count(1)"],keys:_col0, _col1, _col2
-                                                                        Select Operator [SEL_541] (rows=174243235 width=135)
+                                                                        Select Operator [SEL_346] (rows=174243235 width=135)
                                                                           Output:["_col0","_col1","_col2"]
-                                                                          Merge Join Operator [MERGEJOIN_903] (rows=174243235 width=135)
-                                                                            Conds:RS_538._col1=RS_539._col0(Inner),Output:["_col5","_col6","_col7"]
-                                                                          <-Map 159 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_539]
+                                                                          Merge Join Operator [MERGEJOIN_888] (rows=174243235 width=135)
+                                                                            Conds:RS_343._col1=RS_344._col0(Inner),Output:["_col5","_col6","_col7"]
+                                                                          <-Map 96 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_344]
                                                                               PartitionCols:_col0
-                                                                              Select Operator [SEL_534] (rows=462000 width=1436)
+                                                                              Select Operator [SEL_339] (rows=462000 width=1436)
                                                                                 Output:["_col0","_col1","_col2","_col3"]
-                                                                                Filter Operator [FIL_860] (rows=462000 width=1436)
+                                                                                Filter Operator [FIL_835] (rows=462000 width=1436)
                                                                                   predicate:(i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null)
-                                                                                  TableScan [TS_532] (rows=462000 width=1436)
-                                                                                    default@item,iws,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id"]
-                                                                          <-Reducer 155 [SIMPLE_EDGE]
-                                                                            SHUFFLE [RS_538]
+                                                                                   Please refer to the previous TableScan [TS_89]
+                                                                          <-Reducer 51 [SIMPLE_EDGE]
+                                                                            SHUFFLE [RS_343]
                                                                               PartitionCols:_col1
-                                                                              Merge Join Operator [MERGEJOIN_902] (rows=158402938 width=135)
-                                                                                Conds:RS_535._col0=RS_536._col0(Inner),Output:["_col1"]
-                                                                              <-Map 154 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_535]
+                                                                              Merge Join Operator [MERGEJOIN_887] (rows=158402938 width=135)
+                                                                                Conds:RS_340._col0=RS_341._col0(Inner),Output:["_col1"]
+                                                                              <-Map 17 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_341]
                                                                                   PartitionCols:_col0
-                                                                                  Select Operator [SEL_528] (rows=144002668 width=135)
+                                                                                  Select Operator [SEL_336] (rows=8116 width=1119)
+                                                                                    Output:["_col0"]
+                                                                                    Filter Operator [FIL_834] (rows=8116 width=1119)
+                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
+                                                                                       Please refer to the previous TableScan [TS_3]
+                                                                              <-Map 99 [SIMPLE_EDGE]
+                                                                                SHUFFLE [RS_340]
+                                                                                  PartitionCols:_col0
+                                                                                  Select Operator [SEL_333] (rows=144002668 width=135)
                                                                                     Output:["_col0","_col1"]
-                                                                                    Filter Operator [FIL_858] (rows=144002668 width=135)
+                                                                                    Filter Operator [FIL_833] (rows=144002668 width=135)
                                                                                       predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
-                                                                                      TableScan [TS_526] (rows=144002668 width=135)
+                                                                                      TableScan [TS_138] (rows=144002668 width=135)
                                                                                         default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk"]
-                                                                              <-Map 158 [SIMPLE_EDGE]
-                                                                                SHUFFLE [RS_536]
-                                                                                  PartitionCols:_col0
-                                                                                  Select Operator [SEL_531] (rows=8116 width=1119)
-                                                                                    Output:["_col0"]
-                                                                                    Filter Operator [FIL_859] (rows=8116 width=1119)
-                                                                                      predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                                                      TableScan [TS_529] (rows=73049 width=1119)
-                                                                                        default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                  <-Reducer 5 [CONTAINS]
+                                        <-Reducer 94 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_370]
+                                            PartitionCols:_col1
+                                            Merge Join Operator [MERGEJOIN_882] (rows=316788826 width=135)
+                                              Conds:RS_367._col0=RS_368._col0(Inner),Output:["_col1","_col2","_col3"]
+                                            <-Map 93 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_368]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_281] (rows=18262 width=1119)
+                                                  Output:["_col0"]
+                                                  Filter Operator [FIL_824] (rows=18262 width=1119)
+                                                    predicate:((d_year = 2000) and (d_moy = 11) and d_date_sk is not null)
+                                                    TableScan [TS_86] (rows=73049 width=1119)
+                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                            <-Map 100 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_367]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_278] (rows=287989836 width=135)
+                                                  Output:["_col0","_col1","_col2","_col3"]
+                                                  Filter Operator [FIL_823] (rows=287989836 width=135)
+                                                    predicate:(cs_item_sk is not null and cs_sold_date_sk is not null)
+                                                    TableScan [TS_276] (rows=287989836 width=135)
+                                                      default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_quantity","cs_list_price"]
+                  <-Reducer 16 [CONTAINS]
                     Reduce Output Operator [RS_584]
                       PartitionCols:_col0, _col1, _col2, _col3, _col4
                       Group By Operator [GBY_583] (rows=2032776160 width=405)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(_col4)","sum(_col5)"],keys:_col0, _col1, _col2, _col3, 0
-                        Select Operator [SEL_192] (rows=232318249 width=385)
+                        Select Operator [SEL_580] (rows=58081078 width=432)
                           Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                          Filter Operator [FIL_191] (rows=232318249 width=385)
+                          Filter Operator [FIL_579] (rows=58081078 width=432)
                             predicate:(_col5 > _col1)
-                            Merge Join Operator [MERGEJOIN_906] (rows=696954748 width=385)
+                            Merge Join Operator [MERGEJOIN_908] (rows=174243235 width=432)
                               Conds:(Inner),(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6"]
-                            <-Reducer 19 [CUSTOM_SIMPLE_EDGE]
-                              PARTITION_ONLY_SHUFFLE [RS_188]
-                                Group By Operator [GBY_81] (rows=1 width=288)
-                                  Output:["_col0"],aggregations:["avg(VALUE._col0)"]
-                                <-Union 18 [CUSTOM_SIMPLE_EDGE]
-                                  <-Reducer 17 [CONTAINS]
-                                    Reduce Output Operator [RS_80]
-                                      Group By Operator [GBY_79] (rows=1 width=288)
-                                        Output:["_col0"],aggregations:["avg(_col0)"]
-                                        Select Operator [SEL_77] (rows=1108786976 width=108)
-                                          Output:["_col0"]
-                                          Select Operator [SEL_54] (rows=633595212 width=88)
-                                            Output:["_col0","_col1"]
-                                            Merge Join Operator [MERGEJOIN_864] (rows=633595212 width=88)
-                                              Conds:RS_51._col0=RS_52._col0(Inner),Output:["_col1","_col2"]
-                                            <-Map 16 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_51]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_47] (rows=575995635 width=88)
-                                                  Output:["_col0","_col1","_col2"]
-                                                  Filter Operator [FIL_792] (rows=575995635 width=88)
-                                                    predicate:ss_sold_date_sk is not null
-                                                    TableScan [TS_45] (rows=575995635 width=88)
-                                                      default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_quantity","ss_list_price"]
-                                            <-Map 20 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_52]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_50] (rows=8116 width=1119)
+                            <-Reducer 15 [CUSTOM_SIMPLE_EDGE]
+                              PARTITION_ONLY_SHUFFLE [RS_575]
+                                Select Operator [SEL_432] (rows=1 width=8)
+                                  Filter Operator [FIL_431] (rows=1 width=8)
+                                    predicate:(sq_count_check(_col0) <= 1)
+                                    Group By Operator [GBY_429] (rows=1 width=8)
+                                      Output:["_col0"],aggregations:["count()"]
+                                      Select Operator [SEL_424] (rows=1 width=8)
+                                        Group By Operator [GBY_423] (rows=1 width=8)
+                                          Output:["_col0"],aggregations:["count(VALUE._col0)"]
+                                        <-Union 14 [CUSTOM_SIMPLE_EDGE]
+                                          <-Reducer 13 [CONTAINS]
+                                            Reduce Output Operator [RS_422]
+                                              Group By Operator [GBY_421] (rows=1 width=8)
+                                                Output:["_col0"],aggregations:["count(_col0)"]
+                                                Select Operator [SEL_420] (rows=1108786976 width=108)
                                                   Output:["_col0"]
-                                                  Filter Operator [FIL_793] (rows=8116 width=1119)
-                                                    predicate:(d_year BETWEEN 1999 AND 2001 and d_date_sk is not null)
-                                                    TableScan [TS_48] (rows=73049 width=1119)
-                                                      default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                  <-Reducer 22 [CONTAINS]
-                                    Reduce Output Operator [RS_80]
-                                      Group By Operator [GBY_79] (rows=1 width=288)
-                                        Output:["_col0"],aggregations:["avg(_col0)"]
-                                        Select Operator [SEL_77] (rows=1108786976 width=108)
-                                          Output:["_col0"]
-                                          Select Operator [SEL_64] (rows=316788826 width=135)
-                                            Output:["_col0","_col1"]
-                                            Merge Join Operator [MERGEJOIN_865] (rows=316788826 width=135)
-                                              Conds:RS_61._col0=RS_62._col0(Inner),Output:["_col1","_col2"]
-                                            <-Map 21 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_61]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_57] (rows=287989836 width=135)
-                                                  Output:["_col0","_col1","_col2"]
-                                                  Filter Operator [FIL_794] (rows=287989836 

<TRUNCATED>

[12/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query64.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query64.q.out b/ql/src/test/results/clientpositive/perf/query64.q.out
index 6b42393..7f97e39 100644
--- a/ql/src/test/results/clientpositive/perf/query64.q.out
+++ b/ql/src/test/results/clientpositive/perf/query64.q.out
@@ -5,47 +5,47 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 50 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 19 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
 Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 16 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
-Reducer 20 <- Map 19 (SIMPLE_EDGE), Map 25 (SIMPLE_EDGE)
-Reducer 21 <- Reducer 20 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
-Reducer 22 <- Reducer 21 (SIMPLE_EDGE), Reducer 37 (SIMPLE_EDGE)
-Reducer 23 <- Map 39 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 24 <- Map 40 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
-Reducer 28 <- Map 32 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
-Reducer 29 <- Map 33 (SIMPLE_EDGE), Reducer 28 (SIMPLE_EDGE)
-Reducer 3 <- Map 13 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 30 <- Map 34 (SIMPLE_EDGE), Reducer 29 (SIMPLE_EDGE)
-Reducer 36 <- Map 35 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
-Reducer 37 <- Reducer 36 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 15 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 43 <- Map 42 (SIMPLE_EDGE), Map 51 (SIMPLE_EDGE)
-Reducer 44 <- Map 52 (SIMPLE_EDGE), Reducer 43 (SIMPLE_EDGE)
-Reducer 45 <- Reducer 44 (SIMPLE_EDGE), Reducer 54 (SIMPLE_EDGE)
-Reducer 46 <- Map 56 (SIMPLE_EDGE), Reducer 45 (SIMPLE_EDGE)
-Reducer 47 <- Map 57 (SIMPLE_EDGE), Reducer 46 (SIMPLE_EDGE)
-Reducer 48 <- Reducer 47 (SIMPLE_EDGE), Reducer 63 (SIMPLE_EDGE)
-Reducer 49 <- Map 80 (SIMPLE_EDGE), Reducer 48 (SIMPLE_EDGE)
-Reducer 5 <- Map 17 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-Reducer 50 <- Reducer 49 (SIMPLE_EDGE)
-Reducer 54 <- Map 53 (SIMPLE_EDGE), Map 55 (SIMPLE_EDGE)
-Reducer 59 <- Map 58 (SIMPLE_EDGE), Map 64 (SIMPLE_EDGE)
-Reducer 6 <- Map 18 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-Reducer 60 <- Reducer 59 (SIMPLE_EDGE), Reducer 69 (SIMPLE_EDGE)
-Reducer 61 <- Reducer 60 (SIMPLE_EDGE), Reducer 76 (SIMPLE_EDGE)
-Reducer 62 <- Map 78 (SIMPLE_EDGE), Reducer 61 (SIMPLE_EDGE)
-Reducer 63 <- Map 79 (SIMPLE_EDGE), Reducer 62 (SIMPLE_EDGE)
-Reducer 66 <- Map 65 (SIMPLE_EDGE), Map 70 (SIMPLE_EDGE)
-Reducer 67 <- Map 71 (SIMPLE_EDGE), Reducer 66 (SIMPLE_EDGE)
-Reducer 68 <- Map 72 (SIMPLE_EDGE), Reducer 67 (SIMPLE_EDGE)
-Reducer 69 <- Map 73 (SIMPLE_EDGE), Reducer 68 (SIMPLE_EDGE)
-Reducer 7 <- Reducer 24 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
-Reducer 75 <- Map 74 (SIMPLE_EDGE), Map 77 (SIMPLE_EDGE)
-Reducer 76 <- Reducer 75 (SIMPLE_EDGE)
-Reducer 8 <- Map 41 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 12 <- Map 1 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
+Reducer 13 <- Map 20 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 14 <- Reducer 13 (SIMPLE_EDGE), Reducer 36 (SIMPLE_EDGE)
+Reducer 15 <- Map 39 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 40 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 32 (SIMPLE_EDGE)
+Reducer 18 <- Map 40 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 19 <- Reducer 18 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
+Reducer 21 <- Map 20 (SIMPLE_EDGE), Reducer 43 (SIMPLE_EDGE)
+Reducer 22 <- Map 48 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 23 <- Reducer 22 (SIMPLE_EDGE), Reducer 35 (SIMPLE_EDGE)
+Reducer 24 <- Reducer 23 (SIMPLE_EDGE), Reducer 51 (SIMPLE_EDGE)
+Reducer 25 <- Map 55 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 26 <- Map 39 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
+Reducer 27 <- Map 20 (SIMPLE_EDGE), Reducer 45 (SIMPLE_EDGE)
+Reducer 28 <- Map 48 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
+Reducer 29 <- Reducer 28 (SIMPLE_EDGE), Reducer 37 (SIMPLE_EDGE)
+Reducer 3 <- Map 20 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 30 <- Reducer 29 (SIMPLE_EDGE), Reducer 53 (SIMPLE_EDGE)
+Reducer 31 <- Map 55 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
+Reducer 32 <- Map 39 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
+Reducer 34 <- Map 33 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
+Reducer 35 <- Map 33 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
+Reducer 36 <- Map 33 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
+Reducer 37 <- Map 33 (SIMPLE_EDGE), Map 38 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 34 (SIMPLE_EDGE)
+Reducer 42 <- Map 41 (SIMPLE_EDGE), Map 46 (SIMPLE_EDGE)
+Reducer 43 <- Map 47 (SIMPLE_EDGE), Reducer 42 (SIMPLE_EDGE)
+Reducer 44 <- Map 41 (SIMPLE_EDGE), Map 46 (SIMPLE_EDGE)
+Reducer 45 <- Map 47 (SIMPLE_EDGE), Reducer 44 (SIMPLE_EDGE)
+Reducer 5 <- Map 39 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 50 <- Map 49 (SIMPLE_EDGE), Map 54 (SIMPLE_EDGE)
+Reducer 51 <- Reducer 50 (SIMPLE_EDGE)
+Reducer 52 <- Map 49 (SIMPLE_EDGE), Map 54 (SIMPLE_EDGE)
+Reducer 53 <- Reducer 52 (SIMPLE_EDGE)
+Reducer 6 <- Map 40 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 26 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 40 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
@@ -64,14 +64,14 @@ Stage-0
                 predicate:(_col30 <= _col13)
                 Merge Join Operator [MERGEJOIN_610] (rows=821691577 width=88)
                   Conds:RS_256._col2, _col1, _col3=RS_257._col2, _col1, _col3(Inner),Output:["_col0","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col13","_col14","_col15","_col16","_col30","_col31","_col32","_col33"]
-                <-Reducer 50 [SIMPLE_EDGE]
+                <-Reducer 19 [SIMPLE_EDGE]
                   SHUFFLE [RS_257]
                     PartitionCols:_col2, _col1, _col3
                     Select Operator [SEL_254] (rows=746992327 width=88)
                       Output:["_col1","_col2","_col3","_col13","_col14","_col15","_col16"]
                       Group By Operator [GBY_253] (rows=746992327 width=88)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17"],aggregations:["count(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4, KEY._col5, KEY._col6, KEY._col7, KEY._col8, KEY._col9, KEY._col10, KEY._col11, KEY._col12, KEY._col13
-                      <-Reducer 49 [SIMPLE_EDGE]
+                      <-Reducer 18 [SIMPLE_EDGE]
                         SHUFFLE [RS_252]
                           PartitionCols:_col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
                           Group By Operator [GBY_251] (rows=1493984654 width=88)
@@ -82,244 +82,238 @@ Stage-0
                                 predicate:(_col56 <> _col19)
                                 Merge Join Operator [MERGEJOIN_609] (rows=1493984654 width=88)
                                   Conds:RS_246._col39=RS_247._col0(Inner),Output:["_col7","_col9","_col14","_col15","_col16","_col17","_col19","_col21","_col22","_col23","_col24","_col26","_col27","_col45","_col46","_col47","_col48","_col51","_col56"]
-                                <-Map 80 [SIMPLE_EDGE]
+                                <-Map 40 [SIMPLE_EDGE]
                                   SHUFFLE [RS_247]
                                     PartitionCols:_col0
                                     Select Operator [SEL_227] (rows=1861800 width=385)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_573] (rows=1861800 width=385)
                                         predicate:cd_demo_sk is not null
-                                        TableScan [TS_225] (rows=1861800 width=385)
-                                          default@customer_demographics,cd1,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status"]
-                                <-Reducer 48 [SIMPLE_EDGE]
+                                        TableScan [TS_22] (rows=1861800 width=385)
+                                          default@customer_demographics,cd2,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status"]
+                                <-Reducer 17 [SIMPLE_EDGE]
                                   SHUFFLE [RS_246]
                                     PartitionCols:_col39
                                     Merge Join Operator [MERGEJOIN_608] (rows=1358167838 width=88)
                                       Conds:RS_243._col0=RS_244._col18(Inner),Output:["_col7","_col9","_col14","_col15","_col16","_col17","_col19","_col21","_col22","_col23","_col24","_col26","_col27","_col39","_col45","_col46","_col47","_col48","_col51"]
-                                    <-Reducer 47 [SIMPLE_EDGE]
+                                    <-Reducer 16 [SIMPLE_EDGE]
                                       SHUFFLE [RS_243]
                                         PartitionCols:_col0
                                         Merge Join Operator [MERGEJOIN_597] (rows=128840811 width=860)
                                           Conds:RS_240._col1=RS_241._col0(Inner),Output:["_col0","_col7","_col9","_col14","_col15","_col16","_col17","_col19"]
-                                        <-Map 57 [SIMPLE_EDGE]
+                                        <-Map 40 [SIMPLE_EDGE]
                                           SHUFFLE [RS_241]
                                             PartitionCols:_col0
                                             Select Operator [SEL_152] (rows=1861800 width=385)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_561] (rows=1861800 width=385)
                                                 predicate:cd_demo_sk is not null
-                                                TableScan [TS_150] (rows=1861800 width=385)
-                                                  default@customer_demographics,cd2,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status"]
-                                        <-Reducer 46 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_22]
+                                        <-Reducer 15 [SIMPLE_EDGE]
                                           SHUFFLE [RS_240]
                                             PartitionCols:_col1
                                             Merge Join Operator [MERGEJOIN_596] (rows=117128008 width=860)
                                               Conds:RS_237._col3=RS_238._col0(Inner),Output:["_col0","_col1","_col7","_col9","_col14","_col15","_col16","_col17"]
-                                            <-Map 56 [SIMPLE_EDGE]
+                                            <-Map 39 [SIMPLE_EDGE]
                                               SHUFFLE [RS_238]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_149] (rows=40000000 width=1014)
                                                   Output:["_col0","_col1","_col2","_col3","_col4"]
                                                   Filter Operator [FIL_560] (rows=40000000 width=1014)
                                                     predicate:ca_address_sk is not null
-                                                    TableScan [TS_147] (rows=40000000 width=1014)
+                                                    TableScan [TS_19] (rows=40000000 width=1014)
                                                       default@customer_address,ad2,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_street_number","ca_street_name","ca_city","ca_zip"]
-                                            <-Reducer 45 [SIMPLE_EDGE]
+                                            <-Reducer 14 [SIMPLE_EDGE]
                                               SHUFFLE [RS_237]
                                                 PartitionCols:_col3
                                                 Merge Join Operator [MERGEJOIN_595] (rows=106480005 width=860)
                                                   Conds:RS_234._col2=RS_235._col0(Inner),Output:["_col0","_col1","_col3","_col7","_col9"]
-                                                <-Reducer 44 [SIMPLE_EDGE]
+                                                <-Reducer 13 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_234]
                                                     PartitionCols:_col2
                                                     Merge Join Operator [MERGEJOIN_593] (rows=96800003 width=860)
                                                       Conds:RS_231._col4=RS_232._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col7","_col9"]
-                                                    <-Map 52 [SIMPLE_EDGE]
+                                                    <-Map 20 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_232]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_136] (rows=73049 width=1119)
                                                           Output:["_col0","_col1"]
                                                           Filter Operator [FIL_557] (rows=73049 width=1119)
                                                             predicate:d_date_sk is not null
-                                                            TableScan [TS_134] (rows=73049 width=1119)
-                                                              default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                    <-Reducer 43 [SIMPLE_EDGE]
+                                                            TableScan [TS_3] (rows=73049 width=1119)
+                                                              default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                    <-Reducer 12 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_231]
                                                         PartitionCols:_col4
                                                         Merge Join Operator [MERGEJOIN_592] (rows=88000001 width=860)
                                                           Conds:RS_228._col5=RS_229._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col7"]
-                                                        <-Map 42 [SIMPLE_EDGE]
+                                                        <-Map 20 [SIMPLE_EDGE]
+                                                          SHUFFLE [RS_229]
+                                                            PartitionCols:_col0
+                                                            Select Operator [SEL_133] (rows=73049 width=1119)
+                                                              Output:["_col0","_col1"]
+                                                              Filter Operator [FIL_556] (rows=73049 width=1119)
+                                                                predicate:d_date_sk is not null
+                                                                 Please refer to the previous TableScan [TS_3]
+                                                        <-Map 1 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_228]
                                                             PartitionCols:_col5
                                                             Select Operator [SEL_130] (rows=80000000 width=860)
                                                               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                                               Filter Operator [FIL_555] (rows=80000000 width=860)
                                                                 predicate:(c_customer_sk is not null and c_first_sales_date_sk is not null and c_first_shipto_date_sk is not null and c_current_cdemo_sk is not null and c_current_hdemo_sk is not null and c_current_addr_sk is not null)
-                                                                TableScan [TS_128] (rows=80000000 width=860)
+                                                                TableScan [TS_0] (rows=80000000 width=860)
                                                                   default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_cdemo_sk","c_current_hdemo_sk","c_current_addr_sk","c_first_shipto_date_sk","c_first_sales_date_sk"]
-                                                        <-Map 51 [SIMPLE_EDGE]
-                                                          SHUFFLE [RS_229]
-                                                            PartitionCols:_col0
-                                                            Select Operator [SEL_133] (rows=73049 width=1119)
-                                                              Output:["_col0","_col1"]
-                                                              Filter Operator [FIL_556] (rows=73049 width=1119)
-                                                                predicate:d_date_sk is not null
-                                                                TableScan [TS_131] (rows=73049 width=1119)
-                                                                  default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                <-Reducer 54 [SIMPLE_EDGE]
+                                                <-Reducer 36 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_235]
                                                     PartitionCols:_col0
                                                     Merge Join Operator [MERGEJOIN_594] (rows=7920 width=107)
                                                       Conds:RS_143._col1=RS_144._col0(Inner),Output:["_col0"]
-                                                    <-Map 53 [SIMPLE_EDGE]
+                                                    <-Map 33 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_143]
                                                         PartitionCols:_col1
                                                         Select Operator [SEL_139] (rows=7200 width=107)
                                                           Output:["_col0","_col1"]
                                                           Filter Operator [FIL_558] (rows=7200 width=107)
                                                             predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
-                                                            TableScan [TS_137] (rows=7200 width=107)
+                                                            TableScan [TS_9] (rows=7200 width=107)
                                                               default@household_demographics,hd2,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_income_band_sk"]
-                                                    <-Map 55 [SIMPLE_EDGE]
+                                                    <-Map 38 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_144]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_142] (rows=20 width=12)
                                                           Output:["_col0"]
                                                           Filter Operator [FIL_559] (rows=20 width=12)
                                                             predicate:ib_income_band_sk is not null
-                                                            TableScan [TS_140] (rows=20 width=12)
+                                                            TableScan [TS_12] (rows=20 width=12)
                                                               default@income_band,ib2,Tbl:COMPLETE,Col:NONE,Output:["ib_income_band_sk"]
-                                    <-Reducer 63 [SIMPLE_EDGE]
+                                    <-Reducer 32 [SIMPLE_EDGE]
                                       SHUFFLE [RS_244]
                                         PartitionCols:_col18
                                         Select Operator [SEL_224] (rows=1234698008 width=88)
                                           Output:["_col1","_col2","_col3","_col4","_col6","_col7","_col18","_col19","_col25","_col26","_col27","_col28","_col31"]
                                           Merge Join Operator [MERGEJOIN_607] (rows=1234698008 width=88)
                                             Conds:RS_221._col13=RS_222._col0(Inner),Output:["_col10","_col11","_col17","_col18","_col19","_col20","_col23","_col28","_col29","_col31","_col32","_col33","_col34"]
-                                          <-Map 79 [SIMPLE_EDGE]
+                                          <-Map 39 [SIMPLE_EDGE]
                                             SHUFFLE [RS_222]
                                               PartitionCols:_col0
                                               Select Operator [SEL_208] (rows=40000000 width=1014)
                                                 Output:["_col0","_col1","_col2","_col3","_col4"]
                                                 Filter Operator [FIL_572] (rows=40000000 width=1014)
                                                   predicate:ca_address_sk is not null
-                                                  TableScan [TS_206] (rows=40000000 width=1014)
-                                                    default@customer_address,ad1,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_street_number","ca_street_name","ca_city","ca_zip"]
-                                          <-Reducer 62 [SIMPLE_EDGE]
+                                                   Please refer to the previous TableScan [TS_19]
+                                          <-Reducer 31 [SIMPLE_EDGE]
                                             SHUFFLE [RS_221]
                                               PartitionCols:_col13
                                               Merge Join Operator [MERGEJOIN_606] (rows=1122452711 width=88)
                                                 Conds:RS_218._col14=RS_219._col0(Inner),Output:["_col10","_col11","_col13","_col17","_col18","_col19","_col20","_col23","_col28","_col29"]
-                                              <-Map 78 [SIMPLE_EDGE]
+                                              <-Map 55 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_219]
                                                   PartitionCols:_col0
                                                   Select Operator [SEL_205] (rows=1704 width=1910)
                                                     Output:["_col0","_col1","_col2"]
                                                     Filter Operator [FIL_571] (rows=1704 width=1910)
                                                       predicate:(s_store_sk is not null and s_store_name is not null and s_zip is not null)
-                                                      TableScan [TS_203] (rows=1704 width=1910)
+                                                      TableScan [TS_75] (rows=1704 width=1910)
                                                         default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name","s_zip"]
-                                              <-Reducer 61 [SIMPLE_EDGE]
+                                              <-Reducer 30 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_218]
                                                   PartitionCols:_col14
                                                   Merge Join Operator [MERGEJOIN_605] (rows=1020411534 width=88)
                                                     Conds:RS_215._col9=RS_216._col0(Inner),Output:["_col10","_col11","_col13","_col14","_col17","_col18","_col19","_col20","_col23"]
-                                                  <-Reducer 60 [SIMPLE_EDGE]
+                                                  <-Reducer 29 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_215]
                                                       PartitionCols:_col9
                                                       Merge Join Operator [MERGEJOIN_604] (rows=927646829 width=88)
                                                         Conds:RS_212._col0=RS_213._col9(Inner),Output:["_col9","_col10","_col11","_col13","_col14","_col17","_col18","_col19","_col20","_col23"]
-                                                      <-Reducer 59 [SIMPLE_EDGE]
-                                                        SHUFFLE [RS_212]
-                                                          PartitionCols:_col0
-                                                          Merge Join Operator [MERGEJOIN_598] (rows=7920 width=107)
-                                                            Conds:RS_209._col1=RS_210._col0(Inner),Output:["_col0"]
-                                                          <-Map 58 [SIMPLE_EDGE]
-                                                            SHUFFLE [RS_209]
-                                                              PartitionCols:_col1
-                                                              Select Operator [SEL_155] (rows=7200 width=107)
-                                                                Output:["_col0","_col1"]
-                                                                Filter Operator [FIL_562] (rows=7200 width=107)
-                                                                  predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
-                                                                  TableScan [TS_153] (rows=7200 width=107)
-                                                                    default@household_demographics,hd1,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_income_band_sk"]
-                                                          <-Map 64 [SIMPLE_EDGE]
-                                                            SHUFFLE [RS_210]
-                                                              PartitionCols:_col0
-                                                              Select Operator [SEL_158] (rows=20 width=12)
-                                                                Output:["_col0"]
-                                                                Filter Operator [FIL_563] (rows=20 width=12)
-                                                                  predicate:ib_income_band_sk is not null
-                                                                  TableScan [TS_156] (rows=20 width=12)
-                                                                    default@income_band,ib1,Tbl:COMPLETE,Col:NONE,Output:["ib_income_band_sk"]
-                                                      <-Reducer 69 [SIMPLE_EDGE]
+                                                      <-Reducer 28 [SIMPLE_EDGE]
                                                         SHUFFLE [RS_213]
                                                           PartitionCols:_col9
                                                           Select Operator [SEL_186] (rows=843315281 width=88)
                                                             Output:["_col6","_col7","_col8","_col9","_col10","_col11","_col14","_col15","_col16","_col17","_col20"]
                                                             Merge Join Operator [MERGEJOIN_602] (rows=843315281 width=88)
                                                               Conds:RS_183._col7=RS_184._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col9","_col10","_col11","_col12","_col15"]
-                                                            <-Map 73 [SIMPLE_EDGE]
+                                                            <-Map 48 [SIMPLE_EDGE]
                                                               SHUFFLE [RS_184]
                                                                 PartitionCols:_col0
                                                                 Select Operator [SEL_173] (rows=2300 width=1179)
                                                                   Output:["_col0"]
                                                                   Filter Operator [FIL_568] (rows=2300 width=1179)
                                                                     predicate:p_promo_sk is not null
-                                                                    TableScan [TS_171] (rows=2300 width=1179)
+                                                                    TableScan [TS_43] (rows=2300 width=1179)
                                                                       default@promotion,promotion,Tbl:COMPLETE,Col:NONE,Output:["p_promo_sk"]
-                                                            <-Reducer 68 [SIMPLE_EDGE]
+                                                            <-Reducer 27 [SIMPLE_EDGE]
                                                               SHUFFLE [RS_183]
                                                                 PartitionCols:_col7
                                                                 Merge Join Operator [MERGEJOIN_601] (rows=766650239 width=88)
                                                                   Conds:RS_180._col0=RS_181._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col9","_col10","_col11","_col12","_col15"]
-                                                                <-Map 72 [SIMPLE_EDGE]
+                                                                <-Map 20 [SIMPLE_EDGE]
                                                                   SHUFFLE [RS_181]
                                                                     PartitionCols:_col0
                                                                     Select Operator [SEL_170] (rows=36524 width=1119)
                                                                       Output:["_col0"]
                                                                       Filter Operator [FIL_567] (rows=36524 width=1119)
                                                                         predicate:((d_year = 2001) and d_date_sk is not null)
-                                                                        TableScan [TS_168] (rows=73049 width=1119)
-                                                                          default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                                <-Reducer 67 [SIMPLE_EDGE]
+                                                                         Please refer to the previous TableScan [TS_3]
+                                                                <-Reducer 45 [SIMPLE_EDGE]
                                                                   SHUFFLE [RS_180]
                                                                     PartitionCols:_col0
                                                                     Merge Join Operator [MERGEJOIN_600] (rows=696954748 width=88)
                                                                       Conds:RS_177._col1, _col8=RS_178._col0, _col1(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col9","_col10","_col11","_col12","_col15"]
-                                                                    <-Map 71 [SIMPLE_EDGE]
+                                                                    <-Map 47 [SIMPLE_EDGE]
                                                                       SHUFFLE [RS_178]
                                                                         PartitionCols:_col0, _col1
                                                                         Select Operator [SEL_167] (rows=57591150 width=77)
                                                                           Output:["_col0","_col1"]
                                                                           Filter Operator [FIL_566] (rows=57591150 width=77)
                                                                             predicate:(sr_item_sk is not null and sr_ticket_number is not null)
-                                                                            TableScan [TS_165] (rows=57591150 width=77)
+                                                                            TableScan [TS_37] (rows=57591150 width=77)
                                                                               default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_item_sk","sr_ticket_number"]
-                                                                    <-Reducer 66 [SIMPLE_EDGE]
+                                                                    <-Reducer 44 [SIMPLE_EDGE]
                                                                       SHUFFLE [RS_177]
                                                                         PartitionCols:_col1, _col8
                                                                         Merge Join Operator [MERGEJOIN_599] (rows=633595212 width=88)
                                                                           Conds:RS_174._col1=RS_175._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col15"]
-                                                                        <-Map 65 [SIMPLE_EDGE]
+                                                                        <-Map 41 [SIMPLE_EDGE]
                                                                           SHUFFLE [RS_174]
                                                                             PartitionCols:_col1
                                                                             Select Operator [SEL_161] (rows=575995635 width=88)
                                                                               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"]
                                                                               Filter Operator [FIL_564] (rows=575995635 width=88)
                                                                                 predicate:(ss_item_sk is not null and ss_ticket_number is not null and ss_customer_sk is not null and ss_sold_date_sk is not null and ss_store_sk is not null and ss_cdemo_sk is not null and ss_promo_sk is not null and ss_hdemo_sk is not null and ss_addr_sk is not null)
-                                                                                TableScan [TS_159] (rows=575995635 width=88)
+                                                                                TableScan [TS_31] (rows=575995635 width=88)
                                                                                   default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_cdemo_sk","ss_hdemo_sk","ss_addr_sk","ss_store_sk","ss_promo_sk","ss_ticket_number","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-                                                                        <-Map 70 [SIMPLE_EDGE]
+                                                                        <-Map 46 [SIMPLE_EDGE]
                                                                           SHUFFLE [RS_175]
                                                                             PartitionCols:_col0
                                                                             Select Operator [SEL_164] (rows=2851 width=1436)
                                                                               Output:["_col0","_col3"]
                                                                               Filter Operator [FIL_565] (rows=2851 width=1436)
                                                                                 predicate:((i_color) IN ('maroon', 'burnished', 'dim', 'steel', 'navajo', 'chocolate') and i_current_price BETWEEN 35 AND 45 and i_current_price BETWEEN 36 AND 50 and i_item_sk is not null)
-                                                                                TableScan [TS_162] (rows=462000 width=1436)
+                                                                                TableScan [TS_34] (rows=462000 width=1436)
                                                                                   default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_current_price","i_color","i_product_name"]
-                                                  <-Reducer 76 [SIMPLE_EDGE]
+                                                      <-Reducer 37 [SIMPLE_EDGE]
+                                                        SHUFFLE [RS_212]
+                                                          PartitionCols:_col0
+                                                          Merge Join Operator [MERGEJOIN_598] (rows=7920 width=107)
+                                                            Conds:RS_209._col1=RS_210._col0(Inner),Output:["_col0"]
+                                                          <-Map 33 [SIMPLE_EDGE]
+                                                            SHUFFLE [RS_209]
+                                                              PartitionCols:_col1
+                                                              Select Operator [SEL_155] (rows=7200 width=107)
+                                                                Output:["_col0","_col1"]
+                                                                Filter Operator [FIL_562] (rows=7200 width=107)
+                                                                  predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
+                                                                   Please refer to the previous TableScan [TS_9]
+                                                          <-Map 38 [SIMPLE_EDGE]
+                                                            SHUFFLE [RS_210]
+                                                              PartitionCols:_col0
+                                                              Select Operator [SEL_158] (rows=20 width=12)
+                                                                Output:["_col0"]
+                                                                Filter Operator [FIL_563] (rows=20 width=12)
+                                                                  predicate:ib_income_band_sk is not null
+                                                                   Please refer to the previous TableScan [TS_12]
+                                                  <-Reducer 53 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_216]
                                                       PartitionCols:_col0
                                                       Select Operator [SEL_202] (rows=52798137 width=135)
@@ -328,7 +322,7 @@ Stage-0
                                                           predicate:(_col1 > (2 * _col2))
                                                           Group By Operator [GBY_200] (rows=158394413 width=135)
                                                             Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0
-                                                          <-Reducer 75 [SIMPLE_EDGE]
+                                                          <-Reducer 52 [SIMPLE_EDGE]
                                                             SHUFFLE [RS_199]
                                                               PartitionCols:_col0
                                                               Group By Operator [GBY_198] (rows=316788826 width=135)
@@ -337,23 +331,23 @@ Stage-0
                                                                   Output:["_col0","_col1","_col2"]
                                                                   Merge Join Operator [MERGEJOIN_603] (rows=316788826 width=135)
                                                                     Conds:RS_193._col0, _col1=RS_194._col0, _col1(Inner),Output:["_col0","_col2","_col5","_col6","_col7"]
-                                                                  <-Map 74 [SIMPLE_EDGE]
+                                                                  <-Map 49 [SIMPLE_EDGE]
                                                                     SHUFFLE [RS_193]
                                                                       PartitionCols:_col0, _col1
                                                                       Select Operator [SEL_189] (rows=287989836 width=135)
                                                                         Output:["_col0","_col1","_col2"]
                                                                         Filter Operator [FIL_569] (rows=287989836 width=135)
                                                                           predicate:(cs_order_number is not null and cs_item_sk is not null)
-                                                                          TableScan [TS_187] (rows=287989836 width=135)
+                                                                          TableScan [TS_59] (rows=287989836 width=135)
                                                                             default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_item_sk","cs_order_number","cs_ext_list_price"]
-                                                                  <-Map 77 [SIMPLE_EDGE]
+                                                                  <-Map 54 [SIMPLE_EDGE]
                                                                     SHUFFLE [RS_194]
                                                                       PartitionCols:_col0, _col1
                                                                       Select Operator [SEL_192] (rows=28798881 width=106)
                                                                         Output:["_col0","_col1","_col2","_col3","_col4"]
                                                                         Filter Operator [FIL_570] (rows=28798881 width=106)
                                                                           predicate:(cr_order_number is not null and cr_item_sk is not null)
-                                                                          TableScan [TS_190] (rows=28798881 width=106)
+                                                                          TableScan [TS_62] (rows=28798881 width=106)
                                                                             default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_item_sk","cr_order_number","cr_refunded_cash","cr_reversed_charge","cr_store_credit"]
                 <-Reducer 9 [SIMPLE_EDGE]
                   SHUFFLE [RS_256]
@@ -373,151 +367,141 @@ Stage-0
                                 predicate:(_col56 <> _col19)
                                 Merge Join Operator [MERGEJOIN_591] (rows=1493984654 width=88)
                                   Conds:RS_118._col39=RS_119._col0(Inner),Output:["_col7","_col9","_col14","_col15","_col16","_col17","_col19","_col21","_col22","_col23","_col24","_col26","_col27","_col45","_col46","_col47","_col48","_col51","_col56"]
-                                <-Map 41 [SIMPLE_EDGE]
+                                <-Map 40 [SIMPLE_EDGE]
                                   SHUFFLE [RS_119]
                                     PartitionCols:_col0
                                     Select Operator [SEL_99] (rows=1861800 width=385)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_554] (rows=1861800 width=385)
                                         predicate:cd_demo_sk is not null
-                                        TableScan [TS_97] (rows=1861800 width=385)
-                                          default@customer_demographics,cd1,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status"]
+                                         Please refer to the previous TableScan [TS_22]
                                 <-Reducer 7 [SIMPLE_EDGE]
                                   SHUFFLE [RS_118]
                                     PartitionCols:_col39
                                     Merge Join Operator [MERGEJOIN_590] (rows=1358167838 width=88)
                                       Conds:RS_115._col0=RS_116._col18(Inner),Output:["_col7","_col9","_col14","_col15","_col16","_col17","_col19","_col21","_col22","_col23","_col24","_col26","_col27","_col39","_col45","_col46","_col47","_col48","_col51"]
-                                    <-Reducer 24 [SIMPLE_EDGE]
+                                    <-Reducer 26 [SIMPLE_EDGE]
                                       SHUFFLE [RS_116]
                                         PartitionCols:_col18
                                         Select Operator [SEL_96] (rows=1234698008 width=88)
                                           Output:["_col1","_col2","_col3","_col4","_col6","_col7","_col18","_col19","_col25","_col26","_col27","_col28","_col31"]
                                           Merge Join Operator [MERGEJOIN_589] (rows=1234698008 width=88)
                                             Conds:RS_93._col13=RS_94._col0(Inner),Output:["_col10","_col11","_col17","_col18","_col19","_col20","_col23","_col28","_col29","_col31","_col32","_col33","_col34"]
-                                          <-Map 40 [SIMPLE_EDGE]
+                                          <-Map 39 [SIMPLE_EDGE]
                                             SHUFFLE [RS_94]
                                               PartitionCols:_col0
                                               Select Operator [SEL_80] (rows=40000000 width=1014)
                                                 Output:["_col0","_col1","_col2","_col3","_col4"]
                                                 Filter Operator [FIL_553] (rows=40000000 width=1014)
                                                   predicate:ca_address_sk is not null
-                                                  TableScan [TS_78] (rows=40000000 width=1014)
-                                                    default@customer_address,ad1,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_street_number","ca_street_name","ca_city","ca_zip"]
-                                          <-Reducer 23 [SIMPLE_EDGE]
+                                                   Please refer to the previous TableScan [TS_19]
+                                          <-Reducer 25 [SIMPLE_EDGE]
                                             SHUFFLE [RS_93]
                                               PartitionCols:_col13
                                               Merge Join Operator [MERGEJOIN_588] (rows=1122452711 width=88)
                                                 Conds:RS_90._col14=RS_91._col0(Inner),Output:["_col10","_col11","_col13","_col17","_col18","_col19","_col20","_col23","_col28","_col29"]
-                                              <-Map 39 [SIMPLE_EDGE]
+                                              <-Map 55 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_91]
                                                   PartitionCols:_col0
                                                   Select Operator [SEL_77] (rows=1704 width=1910)
                                                     Output:["_col0","_col1","_col2"]
                                                     Filter Operator [FIL_552] (rows=1704 width=1910)
                                                       predicate:(s_store_sk is not null and s_store_name is not null and s_zip is not null)
-                                                      TableScan [TS_75] (rows=1704 width=1910)
-                                                        default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name","s_zip"]
-                                              <-Reducer 22 [SIMPLE_EDGE]
+                                                       Please refer to the previous TableScan [TS_75]
+                                              <-Reducer 24 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_90]
                                                   PartitionCols:_col14
                                                   Merge Join Operator [MERGEJOIN_587] (rows=1020411534 width=88)
                                                     Conds:RS_87._col9=RS_88._col0(Inner),Output:["_col10","_col11","_col13","_col14","_col17","_col18","_col19","_col20","_col23"]
-                                                  <-Reducer 21 [SIMPLE_EDGE]
+                                                  <-Reducer 23 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_87]
                                                       PartitionCols:_col9
                                                       Merge Join Operator [MERGEJOIN_586] (rows=927646829 width=88)
                                                         Conds:RS_84._col0=RS_85._col9(Inner),Output:["_col9","_col10","_col11","_col13","_col14","_col17","_col18","_col19","_col20","_col23"]
-                                                      <-Reducer 20 [SIMPLE_EDGE]
-                                                        SHUFFLE [RS_84]
-                                                          PartitionCols:_col0
-                                                          Merge Join Operator [MERGEJOIN_580] (rows=7920 width=107)
-                                                            Conds:RS_81._col1=RS_82._col0(Inner),Output:["_col0"]
-                                                          <-Map 19 [SIMPLE_EDGE]
-                                                            SHUFFLE [RS_81]
-                                                              PartitionCols:_col1
-                                                              Select Operator [SEL_27] (rows=7200 width=107)
-                                                                Output:["_col0","_col1"]
-                                                                Filter Operator [FIL_543] (rows=7200 width=107)
-                                                                  predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
-                                                                  TableScan [TS_25] (rows=7200 width=107)
-                                                                    default@household_demographics,hd1,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_income_band_sk"]
-                                                          <-Map 25 [SIMPLE_EDGE]
-                                                            SHUFFLE [RS_82]
-                                                              PartitionCols:_col0
-                                                              Select Operator [SEL_30] (rows=20 width=12)
-                                                                Output:["_col0"]
-                                                                Filter Operator [FIL_544] (rows=20 width=12)
-                                                                  predicate:ib_income_band_sk is not null
-                                                                  TableScan [TS_28] (rows=20 width=12)
-                                                                    default@income_band,ib1,Tbl:COMPLETE,Col:NONE,Output:["ib_income_band_sk"]
-                                                      <-Reducer 30 [SIMPLE_EDGE]
+                                                      <-Reducer 22 [SIMPLE_EDGE]
                                                         SHUFFLE [RS_85]
                                                           PartitionCols:_col9
                                                           Select Operator [SEL_58] (rows=843315281 width=88)
                                                             Output:["_col6","_col7","_col8","_col9","_col10","_col11","_col14","_col15","_col16","_col17","_col20"]
                                                             Merge Join Operator [MERGEJOIN_584] (rows=843315281 width=88)
                                                               Conds:RS_55._col7=RS_56._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col9","_col10","_col11","_col12","_col15"]
-                                                            <-Map 34 [SIMPLE_EDGE]
+                                                            <-Map 48 [SIMPLE_EDGE]
                                                               SHUFFLE [RS_56]
                                                                 PartitionCols:_col0
                                                                 Select Operator [SEL_45] (rows=2300 width=1179)
                                                                   Output:["_col0"]
                                                                   Filter Operator [FIL_549] (rows=2300 width=1179)
                                                                     predicate:p_promo_sk is not null
-                                                                    TableScan [TS_43] (rows=2300 width=1179)
-                                                                      default@promotion,promotion,Tbl:COMPLETE,Col:NONE,Output:["p_promo_sk"]
-                                                            <-Reducer 29 [SIMPLE_EDGE]
+                                                                     Please refer to the previous TableScan [TS_43]
+                                                            <-Reducer 21 [SIMPLE_EDGE]
                                                               SHUFFLE [RS_55]
                                                                 PartitionCols:_col7
                                                                 Merge Join Operator [MERGEJOIN_583] (rows=766650239 width=88)
                                                                   Conds:RS_52._col0=RS_53._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col9","_col10","_col11","_col12","_col15"]
-                                                                <-Map 33 [SIMPLE_EDGE]
+                                                                <-Map 20 [SIMPLE_EDGE]
                                                                   SHUFFLE [RS_53]
                                                                     PartitionCols:_col0
                                                                     Select Operator [SEL_42] (rows=36524 width=1119)
                                                                       Output:["_col0"]
                                                                       Filter Operator [FIL_548] (rows=36524 width=1119)
                                                                         predicate:((d_year = 2000) and d_date_sk is not null)
-                                                                        TableScan [TS_40] (rows=73049 width=1119)
-                                                                          default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                                                                <-Reducer 28 [SIMPLE_EDGE]
+                                                                         Please refer to the previous TableScan [TS_3]
+                                                                <-Reducer 43 [SIMPLE_EDGE]
                                                                   SHUFFLE [RS_52]
                                                                     PartitionCols:_col0
                                                                     Merge Join Operator [MERGEJOIN_582] (rows=696954748 width=88)
                                                                       Conds:RS_49._col1, _col8=RS_50._col0, _col1(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col9","_col10","_col11","_col12","_col15"]
-                                                                    <-Map 32 [SIMPLE_EDGE]
+                                                                    <-Map 47 [SIMPLE_EDGE]
                                                                       SHUFFLE [RS_50]
                                                                         PartitionCols:_col0, _col1
                                                                         Select Operator [SEL_39] (rows=57591150 width=77)
                                                                           Output:["_col0","_col1"]
                                                                           Filter Operator [FIL_547] (rows=57591150 width=77)
                                                                             predicate:(sr_item_sk is not null and sr_ticket_number is not null)
-                                                                            TableScan [TS_37] (rows=57591150 width=77)
-                                                                              default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_item_sk","sr_ticket_number"]
-                                                                    <-Reducer 27 [SIMPLE_EDGE]
+                                                                             Please refer to the previous TableScan [TS_37]
+                                                                    <-Reducer 42 [SIMPLE_EDGE]
                                                                       SHUFFLE [RS_49]
                                                                         PartitionCols:_col1, _col8
                                                                         Merge Join Operator [MERGEJOIN_581] (rows=633595212 width=88)
                                                                           Conds:RS_46._col1=RS_47._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col15"]
-                                                                        <-Map 26 [SIMPLE_EDGE]
+                                                                        <-Map 41 [SIMPLE_EDGE]
                                                                           SHUFFLE [RS_46]
                                                                             PartitionCols:_col1
                                                                             Select Operator [SEL_33] (rows=575995635 width=88)
                                                                               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"]
                                                                               Filter Operator [FIL_545] (rows=575995635 width=88)
                                                                                 predicate:(ss_item_sk is not null and ss_ticket_number is not null and ss_customer_sk is not null and ss_sold_date_sk is not null and ss_store_sk is not null and ss_cdemo_sk is not null and ss_promo_sk is not null and ss_hdemo_sk is not null and ss_addr_sk is not null)
-                                                                                TableScan [TS_31] (rows=575995635 width=88)
-                                                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_customer_sk","ss_cdemo_sk","ss_hdemo_sk","ss_addr_sk","ss_store_sk","ss_promo_sk","ss_ticket_number","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
-                                                                        <-Map 31 [SIMPLE_EDGE]
+                                                                                 Please refer to the previous TableScan [TS_31]
+                                                                        <-Map 46 [SIMPLE_EDGE]
                                                                           SHUFFLE [RS_47]
                                                                             PartitionCols:_col0
                                                                             Select Operator [SEL_36] (rows=2851 width=1436)
                                                                               Output:["_col0","_col3"]
                                                                               Filter Operator [FIL_546] (rows=2851 width=1436)
                                                                                 predicate:((i_color) IN ('maroon', 'burnished', 'dim', 'steel', 'navajo', 'chocolate') and i_current_price BETWEEN 35 AND 45 and i_current_price BETWEEN 36 AND 50 and i_item_sk is not null)
-                                                                                TableScan [TS_34] (rows=462000 width=1436)
-                                                                                  default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_current_price","i_color","i_product_name"]
-                                                  <-Reducer 37 [SIMPLE_EDGE]
+                                                                                 Please refer to the previous TableScan [TS_34]
+                                                      <-Reducer 35 [SIMPLE_EDGE]
+                                                        SHUFFLE [RS_84]
+                                                          PartitionCols:_col0
+                                                          Merge Join Operator [MERGEJOIN_580] (rows=7920 width=107)
+                                                            Conds:RS_81._col1=RS_82._col0(Inner),Output:["_col0"]
+                                                          <-Map 33 [SIMPLE_EDGE]
+                                                            SHUFFLE [RS_81]
+                                                              PartitionCols:_col1
+                                                              Select Operator [SEL_27] (rows=7200 width=107)
+                                                                Output:["_col0","_col1"]
+                                                                Filter Operator [FIL_543] (rows=7200 width=107)
+                                                                  predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
+                                                                   Please refer to the previous TableScan [TS_9]
+                                                          <-Map 38 [SIMPLE_EDGE]
+                                                            SHUFFLE [RS_82]
+                                                              PartitionCols:_col0
+                                                              Select Operator [SEL_30] (rows=20 width=12)
+                                                                Output:["_col0"]
+                                                                Filter Operator [FIL_544] (rows=20 width=12)
+                                                                  predicate:ib_income_band_sk is not null
+                                                                   Please refer to the previous TableScan [TS_12]
+                                                  <-Reducer 51 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_88]
                                                       PartitionCols:_col0
                                                       Select Operator [SEL_74] (rows=52798137 width=135)
@@ -526,7 +510,7 @@ Stage-0
                                                           predicate:(_col1 > (2 * _col2))
                                                           Group By Operator [GBY_72] (rows=158394413 width=135)
                                                             Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0
-                                                          <-Reducer 36 [SIMPLE_EDGE]
+                                                          <-Reducer 50 [SIMPLE_EDGE]
                                                             SHUFFLE [RS_71]
                                                               PartitionCols:_col0
                                                               Group By Operator [GBY_70] (rows=316788826 width=135)
@@ -535,99 +519,79 @@ Stage-0
                                                                   Output:["_col0","_col1","_col2"]
                                                                   Merge Join Operator [MERGEJOIN_585] (rows=316788826 width=135)
                                                                     Conds:RS_65._col0, _col1=RS_66._col0, _col1(Inner),Output:["_col0","_col2","_col5","_col6","_col7"]
-                                                                  <-Map 35 [SIMPLE_EDGE]
+                                                                  <-Map 49 [SIMPLE_EDGE]
                                                                     SHUFFLE [RS_65]
                                                                       PartitionCols:_col0, _col1
                                                                       Select Operator [SEL_61] (rows=287989836 width=135)
                                                                         Output:["_col0","_col1","_col2"]
                                                                         Filter Operator [FIL_550] (rows=287989836 width=135)
                                                                           predicate:(cs_order_number is not null and cs_item_sk is not null)
-                                                                          TableScan [TS_59] (rows=287989836 width=135)
-                                                                            default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_item_sk","cs_order_number","cs_ext_list_price"]
-                                                                  <-Map 38 [SIMPLE_EDGE]
+                                                                           Please refer to the previous TableScan [TS_59]
+                                                                  <-Map 54 [SIMPLE_EDGE]
                                                                     SHUFFLE [RS_66]
                                                                       PartitionCols:_col0, _col1
                                                                       Select Operator [SEL_64] (rows=28798881 width=106)
                                                                         Output:["_col0","_col1","_col2","_col3","_col4"]
                                                                         Filter Operator [FIL_551] (rows=28798881 width=106)
                                                                           predicate:(cr_order_number is not null and cr_item_sk is not null)
-                                                                          TableScan [TS_62] (rows=28798881 width=106)
-                                                                            default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_item_sk","cr_order_number","cr_refunded_cash","cr_reversed_charge","cr_store_credit"]
+                                                                           Please refer to the previous TableScan [TS_62]
                                     <-Reducer 6 [SIMPLE_EDGE]
                                       SHUFFLE [RS_115]
                                         PartitionCols:_col0
                                         Merge Join Operator [MERGEJOIN_579] (rows=128840811 width=860)
                                           Conds:RS_112._col1=RS_113._col0(Inner),Output:["_col0","_col7","_col9","_col14","_col15","_col16","_col17","_col19"]
-                                        <-Map 18 [SIMPLE_EDGE]
+                                        <-Map 40 [SIMPLE_EDGE]
                                           SHUFFLE [RS_113]
                                             PartitionCols:_col0
                                             Select Operator [SEL_24] (rows=1861800 width=385)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_542] (rows=1861800 width=385)
                                                 predicate:cd_demo_sk is not null
-                                                TableScan [TS_22] (rows=1861800 width=385)
-                                                  default@customer_demographics,cd2,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status"]
+                                                 Please refer to the previous TableScan [TS_22]
                                         <-Reducer 5 [SIMPLE_EDGE]
                                           SHUFFLE [RS_112]
                                             PartitionCols:_col1
                                             Merge Join Operator [MERGEJOIN_578] (rows=117128008 width=860)
                                               Conds:RS_109._col3=RS_110._col0(Inner),Output:["_col0","_col1","_col7","_col9","_col14","_col15","_col16","_col17"]
-                                            <-Map 17 [SIMPLE_EDGE]
+                                            <-Map 39 [SIMPLE_EDGE]
                                               SHUFFLE [RS_110]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_21] (rows=40000000 width=1014)
                                                   Output:["_col0","_col1","_col2","_col3","_col4"]
                                                   Filter Operator [FIL_541] (rows=40000000 width=1014)
                                                     predicate:ca_address_sk is not null
-                                                    TableScan [TS_19] (rows=40000000 width=1014)
-                                                      default@customer_address,ad2,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_street_number","ca_street_name","ca_city","ca_zip"]
+                                                     Please refer to the previous TableScan [TS_19]
                                             <-Reducer 4 [SIMPLE_EDGE]
                                               SHUFFLE [RS_109]
                                                 PartitionCols:_col3
                                                 Merge Join Operator [MERGEJOIN_577] (rows=106480005 width=860)
                                                   Conds:RS_106._col2=RS_107._col0(Inner),Output:["_col0","_col1","_col3","_col7","_col9"]
-                                                <-Reducer 15 [SIMPLE_EDGE]
-                                                  SHUFFLE [RS_107]
-                                                    PartitionCols:_col0
-                                                    Merge Join Operator [MERGEJOIN_576] (rows=7920 width=107)
-                                                      Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col0"]
-                                                    <-Map 14 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_15]
-                                                        PartitionCols:_col1
-                                                        Select Operator [SEL_11] (rows=7200 width=107)
-                                                          Output:["_col0","_col1"]
-                                                          Filter Operator [FIL_539] (rows=7200 width=107)
-                                                            predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
-                                                            TableScan [TS_9] (rows=7200 width=107)
-                                                              default@household_demographics,hd2,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_income_band_sk"]
-                                                    <-Map 16 [SIMPLE_EDGE]
-                                                      SHUFFLE [RS_16]
-                                                        PartitionCols:_col0
-                                                        Select Operator [SEL_14] (rows=20 width=12)
-                                                          Output:["_col0"]
-                                                          Filter Operator [FIL_540] (rows=20 width=12)
-                                                            predicate:ib_income_band_sk is not null
-                                                            TableScan [TS_12] (rows=20 width=12)
-                                                              default@income_band,ib2,Tbl:COMPLETE,Col:NONE,Output:["ib_income_band_sk"]
                                                 <-Reducer 3 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_106]
                                                     PartitionCols:_col2
                                                     Merge Join Operator [MERGEJOIN_575] (rows=96800003 width=860)
                                                       Conds:RS_103._col4=RS_104._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col7","_col9"]
-                                                    <-Map 13 [SIMPLE_EDGE]
+                                                    <-Map 20 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_104]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_8] (rows=73049 width=1119)
                                                           Output:["_col0","_col1"]
                                                           Filter Operator [FIL_538] (rows=73049 width=1119)
                                                             predicate:d_date_sk is not null
-                                                            TableScan [TS_6] (rows=73049 width=1119)
-                                                              default@date_dim,d3,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                             Please refer to the previous TableScan [TS_3]
                                                     <-Reducer 2 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_103]
                                                         PartitionCols:_col4
                                                         Merge Join Operator [MERGEJOIN_574] (rows=88000001 width=860)
                                                           Conds:RS_100._col5=RS_101._col0(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col7"]
+                                                        <-Map 20 [SIMPLE_EDGE]
+                                                          SHUFFLE [RS_101]
+                                                            PartitionCols:_col0
+                                                            Select Operator [SEL_5] (rows=73049 width=1119)
+                                                              Output:["_col0","_col1"]
+                                                              Filter Operator [FIL_537] (rows=73049 width=1119)
+                                                                predicate:d_date_sk is not null
+                                                                 Please refer to the previous TableScan [TS_3]
                                                         <-Map 1 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_100]
                                                             PartitionCols:_col5
@@ -635,15 +599,26 @@ Stage-0
                                                               Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                                               Filter Operator [FIL_536] (rows=80000000 width=860)
                                                                 predicate:(c_customer_sk is not null and c_first_sales_date_sk is not null and c_first_shipto_date_sk is not null and c_current_cdemo_sk is not null and c_current_hdemo_sk is not null and c_current_addr_sk is not null)
-                                                                TableScan [TS_0] (rows=80000000 width=860)
-                                                                  default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_cdemo_sk","c_current_hdemo_sk","c_current_addr_sk","c_first_shipto_date_sk","c_first_sales_date_sk"]
-                                                        <-Map 12 [SIMPLE_EDGE]
-                                                          SHUFFLE [RS_101]
-                                                            PartitionCols:_col0
-                                                            Select Operator [SEL_5] (rows=73049 width=1119)
-                                                              Output:["_col0","_col1"]
-                                                              Filter Operator [FIL_537] (rows=73049 width=1119)
-                                                                predicate:d_date_sk is not null
-                                                                TableScan [TS_3] (rows=73049 width=1119)
-                                                                  default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                                 Please refer to the previous TableScan [TS_0]
+                                                <-Reducer 34 [SIMPLE_EDGE]
+                                                  SHUFFLE [RS_107]
+                                                    PartitionCols:_col0
+                                                    Merge Join Operator [MERGEJOIN_576] (rows=7920 width=107)
+                                                      Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col0"]
+                                                    <-Map 33 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_15]
+                                                        PartitionCols:_col1
+                                                        Select Operator [SEL_11] (rows=7200 width=107)
+                                                          Output:["_col0","_col1"]
+                                                          Filter Operator [FIL_539] (rows=7200 width=107)
+                                                            predicate:(hd_demo_sk is not null and hd_income_band_sk is not null)
+                                                             Please refer to the previous TableScan [TS_9]
+                                                    <-Map 38 [SIMPLE_EDGE]
+                                                      SHUFFLE [RS_16]
+                                                        PartitionCols:_col0
+                                                        Select Operator [SEL_14] (rows=20 width=12)
+                                                          Output:["_col0"]
+                                                          Filter Operator [FIL_540] (rows=20 width=12)
+                                                            predicate:ib_income_band_sk is not null
+                                                             Please refer to the previous TableScan [TS_12]
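[Note on the plan diff above: the new "Please refer to the previous TableScan [TS_xx]" lines come from the user-level explain output, which now prints a back-reference instead of repeating an operator subtree that already appeared earlier in the plan (for example when several joins reuse the same scan). The sketch below is a generic illustration of that back-reference idea only; it is not the Hive jsonexplain code, and the class and method names are assumptions made for the example.]

    // Illustrative sketch only: a minimal DAG printer that emits a back-reference the
    // second time it reaches a node, mirroring the "Please refer to the previous ..."
    // lines in the plan above. Not the actual Hive implementation.
    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class PlanNode {
      final String id;                              // e.g. "TableScan [TS_34]"
      final List<PlanNode> children = new ArrayList<>();
      PlanNode(String id) { this.id = id; }
    }

    class PlanPrinter {
      private final Set<String> printed = new HashSet<>();

      void print(PlanNode node, int indent) {
        StringBuilder pad = new StringBuilder();
        for (int i = 0; i < indent; i++) {
          pad.append(' ');
        }
        if (!printed.add(node.id)) {
          // Already printed once: refer back instead of duplicating the subtree.
          System.out.println(pad + "Please refer to the previous " + node.id);
          return;
        }
        System.out.println(pad + node.id);
        for (PlanNode child : node.children) {
          print(child, indent + 2);
        }
      }
    }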
 


[40/50] [abbrv] hive git commit: HIVE-1010 Addendum: Commit file missing from original commit (Gunther Hagleitner)

Posted by we...@apache.org.
HIVE-1010 Addendum: Commit file missing from original commit (Gunther Hagleitner)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0ce98b3a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0ce98b3a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0ce98b3a

Branch: refs/heads/hive-14535
Commit: 0ce98b3a7527f72216e9e41f7e610b44ee524758
Parents: 7d4554d
Author: Gunther Hagleitner <gu...@apache.org>
Authored: Mon May 15 14:57:30 2017 -0700
Committer: Gunther Hagleitner <gu...@apache.org>
Committed: Mon May 15 14:57:30 2017 -0700

----------------------------------------------------------------------
 metastore/scripts/upgrade/hive/upgrade.order.hive | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0ce98b3a/metastore/scripts/upgrade/hive/upgrade.order.hive
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/hive/upgrade.order.hive b/metastore/scripts/upgrade/hive/upgrade.order.hive
new file mode 100644
index 0000000..e69de29


[50/50] [abbrv] hive git commit: HIVE-14671 : merge master into hive-14535 (Wei Zheng)

Posted by we...@apache.org.
HIVE-14671 : merge master into hive-14535 (Wei Zheng)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/08edf03f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/08edf03f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/08edf03f

Branch: refs/heads/hive-14535
Commit: 08edf03f6338b4b43b53e9310faa8d38460e4b4f
Parents: 7751107 6e0c52e
Author: Wei Zheng <we...@apache.org>
Authored: Tue May 16 19:51:52 2017 -0700
Committer: Wei Zheng <we...@apache.org>
Committed: Tue May 16 19:51:52 2017 -0700

----------------------------------------------------------------------
 .gitignore                                      |    2 -
 .../java/org/apache/hive/beeline/BeeLine.java   |   17 +-
 .../org/apache/hive/beeline/BeeLineOpts.java    |   11 +
 .../java/org/apache/hive/beeline/Commands.java  |  110 +-
 .../apache/hive/beeline/HiveSchemaHelper.java   |  108 +-
 .../org/apache/hive/beeline/HiveSchemaTool.java |  148 +-
 beeline/src/main/resources/BeeLine.properties   |    3 +
 .../org/apache/hive/beeline/ProxyAuthTest.java  |    4 +-
 .../apache/hive/beeline/TestHiveSchemaTool.java |    4 +-
 .../apache/hive/beeline/cli/TestHiveCli.java    |    8 +-
 checkstyle/checkstyle.xml                       |   12 +-
 .../hadoop/hive/common/CompressionUtils.java    |    8 +-
 .../hive/common/CopyOnFirstWriteProperties.java |   18 +
 .../apache/hadoop/hive/common/JavaUtils.java    |    4 +-
 .../hadoop/hive/common/StatsSetupConst.java     |    6 +-
 .../hive/common/ValidCompactorTxnList.java      |    4 +-
 .../common/classification/RetrySemantics.java   |    1 -
 .../hive/common/cli/CommonCliOptions.java       |    4 +-
 .../hive/common/jsonexplain/DagJsonParser.java  |    6 +
 .../hadoop/hive/common/jsonexplain/Op.java      |   54 +-
 .../hadoop/hive/common/jsonexplain/Stage.java   |   20 +-
 .../hadoop/hive/common/jsonexplain/Vertex.java  |  100 +-
 .../common/metrics/common/MetricsVariable.java  |    2 +-
 .../hadoop/hive/common/type/Decimal128.java     |   12 +-
 .../hadoop/hive/common/type/TimestampTZ.java    |  197 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   21 +-
 .../apache/hadoop/hive/conf/HiveConfUtil.java   |    3 +-
 .../apache/hive/common/util/HashCodeUtil.java   |   30 +-
 .../hive/common/util/HiveStringUtils.java       |    4 +-
 .../apache/hive/common/util/RetryUtilities.java |  112 +
 .../hive/common/util/ShutdownHookManager.java   |    2 +-
 .../java/org/apache/hive/http/HttpServer.java   |    6 +-
 .../org/apache/hive/http/JMXJsonServlet.java    |    6 +-
 .../apache/hadoop/hive/common/TestLogUtils.java |    4 +-
 .../hadoop/hive/common/TestTezJsonParser.java   |  174 +
 .../hive/common/type/TestTimestampTZ.java       |  120 +
 .../hive/common/util/TestRetryUtilities.java    |  150 +
 .../test/queries/clientnegative/serde_regex.q   |    4 +-
 .../test/queries/clientpositive/serde_regex.q   |    6 +-
 .../results/clientnegative/serde_regex.q.out    |    6 +-
 .../results/clientpositive/serde_regex.q.out    |   12 +-
 .../hadoop/hive/druid/DruidStorageHandler.java  |    6 +
 .../src/test/queries/positive/hbase_timestamp.q |   26 +-
 .../test/results/positive/hbase_timestamp.q.out |   52 +-
 .../mapreduce/TestHCatMultiOutputFormat.java    |    2 +-
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4     |    2 +-
 .../hive/it/custom/udfs/GenericUDFRot13.java    |   18 +
 .../custom/udfs/vector/VectorStringRot13.java   |   18 +
 .../listener/DummyRawStoreFailEvent.java        |    7 +-
 .../test/queries/clientpositive/create_like.q   |    3 +-
 .../queries/clientpositive/orc_format_part.q    |   12 +-
 .../clientpositive/orc_nonstd_partitions_loc.q  |   14 +-
 .../queries/clientpositive/rcfile_format_part.q |   12 +-
 .../rcfile_nonstd_partitions_loc.q              |   14 +-
 .../results/clientpositive/create_like.q.out    |   15 +-
 .../clientpositive/orc_format_part.q.out        |   24 +-
 .../orc_nonstd_partitions_loc.q.out             |   28 +-
 .../clientpositive/rcfile_format_part.q.out     |   24 +-
 .../rcfile_nonstd_partitions_loc.q.out          |   28 +-
 .../hive/minikdc/TestHiveAuthFactory.java       |    5 +-
 .../hive/minikdc/TestJdbcWithMiniKdc.java       |    4 +-
 .../hadoop/hive/common/TestFileUtils.java       |   18 +
 .../hive/thrift/TestHadoopAuthBridge23.java     |    4 +-
 .../hive/llap/ext/TestLlapInputSplit.java       |    6 +-
 .../metastore/TestEmbeddedHiveMetaStore.java    |   18 +-
 .../hive/metastore/TestHiveMetaStore.java       |   49 +
 .../hive/metastore/TestRemoteHiveMetaStore.java |   11 +-
 .../hive/metastore/TestSetUGIOnOnlyClient.java  |    9 +-
 .../hive/metastore/TestSetUGIOnOnlyServer.java  |    9 +-
 .../hive/ql/TestReplicationScenarios.java       | 1855 ----------
 .../hadoop/hive/ql/history/TestHiveHistory.java |    5 +-
 .../hive/ql/parse/TestReplicationScenarios.java | 1975 ++++++++++
 ...TestReplicationScenariosAcrossInstances.java |   91 +
 .../hadoop/hive/ql/parse/WarehouseInstance.java |  191 +
 .../hive/beeline/TestBeeLineWithArgs.java       |   58 +-
 .../org/apache/hive/beeline/TestSchemaTool.java |   31 +-
 .../apache/hive/jdbc/TestJdbcWithMiniLlap.java  |  161 +
 .../thrift/TestThriftCLIServiceWithBinary.java  |    5 +-
 .../thrift/TestThriftCLIServiceWithHttp.java    |    5 +-
 .../TestThriftHttpCLIServiceFeatures.java       |    5 +-
 .../thrift/ThriftCliServiceMessageSizeTest.java |    5 +-
 .../hive/cli/TestMiniSparkOnYarnCliDriver.java  |   18 +
 .../hadoop/hive/cli/TestSparkCliDriver.java     |   18 +
 .../hive/cli/TestSparkNegativeCliDriver.java    |   18 +
 .../test/resources/testconfiguration.properties |    7 +-
 .../hive/cli/control/CorePerfCliDriver.java     |    3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |    7 +-
 .../java/org/apache/hive/beeline/QFile.java     |   20 +-
 .../apache/hive/beeline/QFileBeeLineClient.java |    8 +-
 .../hive/storage/jdbc/JdbcInputFormat.java      |   15 +-
 .../hive/storage/jdbc/JdbcRecordReader.java     |    4 +-
 .../org/apache/hive/storage/jdbc/JdbcSerDe.java |    4 +-
 .../hive/storage/jdbc/JdbcStorageHandler.java   |   39 +-
 .../hive/storage/jdbc/conf/DatabaseType.java    |    6 +-
 .../jdbc/conf/JdbcStorageConfigManager.java     |   98 +-
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java   |   18 +-
 .../storage/jdbc/dao/JdbcRecordIterator.java    |    3 -
 .../storage/jdbc/dao/MySqlDatabaseAccessor.java |    2 +-
 .../config/JdbcStorageConfigManagerTest.java    |   12 +-
 .../org/apache/hive/jdbc/HiveBaseResultSet.java |    3 +
 .../org/apache/hive/jdbc/HiveConnection.java    |   10 +-
 .../org/apache/hive/jdbc/HiveStatement.java     |   21 +-
 .../java/org/apache/hive/jdbc/JdbcColumn.java   |   11 +
 .../hadoop/hive/llap/LlapBaseRecordReader.java  |   48 +-
 .../hadoop/hive/llap/LlapRowRecordReader.java   |  126 +-
 .../ext/LlapTaskUmbilicalExternalClient.java    |   89 +-
 .../hive/llap/tez/LlapProtocolClientProxy.java  |    5 +-
 .../helpers/LlapTaskUmbilicalServer.java        |    4 +
 .../org/apache/hadoop/hive/llap/FieldDesc.java  |   20 +-
 .../java/org/apache/hadoop/hive/llap/Row.java   |  252 +-
 .../org/apache/hadoop/hive/llap/TypeDesc.java   |  108 -
 .../hadoop/hive/llap/io/ChunkedInputStream.java |   97 +
 .../hive/llap/io/ChunkedOutputStream.java       |  103 +
 .../org/apache/hadoop/hive/llap/TestRow.java    |   15 +-
 .../hive/llap/io/TestChunkedInputStream.java    |  254 ++
 .../hadoop/hive/llap/cli/LlapServiceDriver.java |    4 +-
 .../hadoop/hive/llap/daemon/impl/QueryInfo.java |   44 +-
 .../hive/llap/daemon/impl/QueryTracker.java     |    4 +-
 .../llap/daemon/impl/TaskRunnerCallable.java    |    1 +
 .../llap/shufflehandler/ShuffleHandler.java     |    4 +-
 llap-server/src/main/resources/package.py       |    6 +-
 .../daemon/impl/TaskExecutorTestHelpers.java    |    5 +-
 metastore/if/hive_metastore.thrift              |    2 +
 .../upgrade/derby/041-HIVE-16556.derby.sql      |    5 +
 .../upgrade/derby/hive-schema-3.0.0.derby.sql   |   12 +-
 .../derby/upgrade-2.3.0-to-3.0.0.derby.sql      |    2 +
 .../upgrade/hive/hive-schema-3.0.0.hive.sql     | 1223 +++++++
 .../scripts/upgrade/hive/upgrade.order.hive     |    0
 .../upgrade/mssql/026-HIVE-16556.mssql.sql      |   10 +
 .../upgrade/mssql/hive-schema-3.0.0.mssql.sql   |   10 +
 .../mssql/upgrade-2.3.0-to-3.0.0.mssql.sql      |    2 +
 .../upgrade/mysql/041-HIVE-16556.mysql.sql      |   11 +
 .../upgrade/mysql/hive-schema-3.0.0.mysql.sql   |   12 +
 .../mysql/upgrade-2.3.0-to-3.0.0.mysql.sql      |    2 +
 .../upgrade/oracle/041-HIVE-16556.oracle.sql    |   11 +
 .../upgrade/oracle/hive-schema-3.0.0.oracle.sql |   11 +
 .../oracle/upgrade-2.3.0-to-3.0.0.oracle.sql    |    2 +
 .../postgres/040-HIVE-16556.postgres.sql        |   13 +
 .../postgres/hive-schema-3.0.0.postgres.sql     |   16 +-
 .../upgrade-2.3.0-to-3.0.0.postgres.sql         |    2 +
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  |  390 ++
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |  122 +
 .../ThriftHiveMetastore_server.skeleton.cpp     |    5 +
 .../hive/metastore/api/ThriftHiveMetastore.java | 1767 ++++++---
 .../gen-php/metastore/ThriftHiveMetastore.php   |  208 ++
 .../hive_metastore/ThriftHiveMetastore-remote   |    7 +
 .../hive_metastore/ThriftHiveMetastore.py       |  178 +
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   60 +
 .../hadoop/hive/metastore/HiveAlterHandler.java |  143 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |   10 +
 .../hive/metastore/HiveMetaStoreClient.java     |    5 +
 .../hadoop/hive/metastore/IMetaStoreClient.java |    8 +
 .../hadoop/hive/metastore/ObjectStore.java      |   84 +-
 .../apache/hadoop/hive/metastore/RawStore.java  |    7 +
 .../hadoop/hive/metastore/cache/CacheUtils.java |    2 +-
 .../hive/metastore/cache/CachedStore.java       |   18 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |    5 +
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |    3 +-
 .../metastore/model/MMetastoreDBProperties.java |   56 +
 metastore/src/model/package.jdo                 |   18 +
 .../DummyRawStoreControlledCommit.java          |    5 +
 .../DummyRawStoreForJdoConnection.java          |    7 +-
 pom.xml                                         |    3 +-
 .../hive/llap/LlapOutputFormatService.java      |    5 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |   30 +-
 .../org/apache/hadoop/hive/ql/QueryState.java   |  146 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  106 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |   25 +-
 .../hadoop/hive/ql/exec/FetchOperator.java      |    6 +
 .../hadoop/hive/ql/exec/FileSinkOperator.java   |   22 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |    4 +-
 .../hadoop/hive/ql/exec/GroupByOperator.java    |    4 +-
 .../hadoop/hive/ql/exec/OperatorFactory.java    |   20 +-
 .../hive/ql/exec/SerializationUtilities.java    |   18 +
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |    5 +
 .../apache/hadoop/hive/ql/exec/Utilities.java   |   45 +-
 .../hadoop/hive/ql/exec/mr/MapRedTask.java      |    2 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |   32 +-
 .../hadoop/hive/ql/exec/tez/DagUtils.java       |    3 +-
 .../ql/exec/tez/monitoring/TezJobMonitor.java   |   30 +-
 .../VectorReduceSinkCommonOperator.java         |   79 +-
 .../VectorReduceSinkEmptyKeyOperator.java       |  177 +
 .../VectorReduceSinkLongOperator.java           |    3 +-
 .../VectorReduceSinkMultiKeyOperator.java       |    4 +-
 .../VectorReduceSinkObjectHashOperator.java     |  126 +-
 .../VectorReduceSinkStringOperator.java         |    3 +-
 .../VectorReduceSinkUniformHashOperator.java    |   48 +-
 .../hive/ql/index/HiveIndexedInputFormat.java   |    7 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |   16 +-
 .../ql/io/parquet/MapredParquetInputFormat.java |   47 +
 .../ql/io/parquet/ParquetRecordReaderBase.java  |    3 +-
 .../hive/ql/io/parquet/ProjectionPusher.java    |    9 +-
 .../ql/io/parquet/serde/ParquetHiveSerDe.java   |    9 +
 .../ql/io/parquet/serde/ParquetTableUtils.java  |   22 +
 .../ql/io/parquet/timestamp/NanoTimeUtils.java  |    5 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |    3 +-
 .../hadoop/hive/ql/log/LogDivertAppender.java   |    2 +-
 .../hive/ql/log/LogDivertAppenderForTest.java   |    5 +-
 .../hive/ql/metadata/DefaultStorageHandler.java |    5 +
 .../metadata/HiveMaterializedViewsRegistry.java |    5 +-
 .../hive/ql/metadata/HiveStorageHandler.java    |    6 +
 .../hadoop/hive/ql/metadata/Partition.java      |   10 +-
 .../AnnotateReduceSinkOutputOperator.java       |   73 -
 .../hive/ql/optimizer/ConvertJoinMapJoin.java   |    9 +-
 .../DynamicPartitionPruningOptimization.java    |  147 +-
 .../hive/ql/optimizer/MapJoinProcessor.java     |   11 +-
 .../hadoop/hive/ql/optimizer/Optimizer.java     |    4 -
 .../ql/optimizer/ReduceSinkMapJoinProc.java     |    6 +-
 .../hive/ql/optimizer/SharedScanOptimizer.java  |  625 ++++
 .../optimizer/calcite/HivePlannerContext.java   |   12 +-
 .../ql/optimizer/calcite/RelOptHiveTable.java   |    2 +-
 .../hive/ql/optimizer/calcite/SubqueryConf.java |   42 +
 .../calcite/rules/HiveSubQueryRemoveRule.java   |   59 +-
 .../calcite/translator/TypeConverter.java       |    3 +
 .../optimizer/physical/CrossProductCheck.java   |    3 +-
 .../physical/GenMRSkewJoinProcessor.java        |    2 +-
 .../physical/GenSparkSkewJoinProcessor.java     |    2 +-
 .../physical/SparkCrossProductCheck.java        |    3 +-
 .../hive/ql/optimizer/physical/Vectorizer.java  |  219 +-
 .../ppr/PartitionExpressionForMetastore.java    |    8 +
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |   25 +-
 .../ql/parse/ColumnStatsAutoGatherContext.java  |    9 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |    5 +-
 .../apache/hadoop/hive/ql/parse/EximUtil.java   |    6 +-
 .../apache/hadoop/hive/ql/parse/GenTezWork.java |   10 +-
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |    3 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |    6 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g    |    2 +
 .../hadoop/hive/ql/parse/ParseContext.java      |    6 +-
 .../apache/hadoop/hive/ql/parse/QBSubQuery.java |   62 +-
 .../ql/parse/ReplicationSemanticAnalyzer.java   |   56 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   92 +-
 .../hadoop/hive/ql/parse/SemiJoinHint.java      |    8 +-
 .../hadoop/hive/ql/parse/TezCompiler.java       |   81 +-
 .../hive/ql/parse/TypeCheckProcFactory.java     |    2 +
 .../hadoop/hive/ql/parse/UnparseTranslator.java |    1 -
 .../hadoop/hive/ql/parse/repl/DumpType.java     |  120 +-
 .../repl/dump/events/AbstractEventHandler.java  |   46 +
 .../parse/repl/dump/events/AbstractHandler.java |   46 -
 .../repl/dump/events/AddPartitionHandler.java   |    2 +-
 .../repl/dump/events/AlterPartitionHandler.java |    2 +-
 .../repl/dump/events/AlterTableHandler.java     |    2 +-
 .../repl/dump/events/CreateFunctionHandler.java |   21 +-
 .../repl/dump/events/CreateTableHandler.java    |    2 +-
 .../parse/repl/dump/events/DefaultHandler.java  |    2 +-
 .../repl/dump/events/DropPartitionHandler.java  |    2 +-
 .../repl/dump/events/DropTableHandler.java      |    2 +-
 .../parse/repl/dump/events/InsertHandler.java   |    2 +-
 .../hive/ql/parse/repl/load/DumpMetaData.java   |    1 -
 .../load/message/CreateFunctionHandler.java     |   68 +
 .../parse/repl/load/message/DefaultHandler.java |    2 +-
 .../repl/load/message/DropPartitionHandler.java |    2 +-
 .../repl/load/message/DropTableHandler.java     |    2 +-
 .../parse/repl/load/message/InsertHandler.java  |    2 +-
 .../load/message/MessageHandlerFactory.java     |   79 -
 .../load/message/RenamePartitionHandler.java    |    2 +-
 .../repl/load/message/RenameTableHandler.java   |    2 +-
 .../parse/repl/load/message/TableHandler.java   |    2 +-
 .../load/message/TruncatePartitionHandler.java  |    2 +-
 .../repl/load/message/TruncateTableHandler.java |    2 +-
 .../hadoop/hive/ql/plan/ExprNodeDescUtils.java  |   68 +-
 .../apache/hadoop/hive/ql/plan/JoinDesc.java    |   12 +
 .../apache/hadoop/hive/ql/plan/MapJoinDesc.java |    3 +-
 .../hadoop/hive/ql/plan/PartitionDesc.java      |    5 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   |   15 +
 .../hadoop/hive/ql/plan/ReduceSinkDesc.java     |   69 +-
 .../apache/hadoop/hive/ql/plan/SparkWork.java   |    6 +-
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |    9 +
 .../hive/ql/plan/VectorReduceSinkDesc.java      |   28 +
 .../hadoop/hive/ql/ppd/OpProcFactory.java       |   20 +-
 .../plugin/sqlstd/Operation2Privilege.java      |    1 +
 .../hadoop/hive/ql/session/SessionState.java    |   89 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java |   16 +-
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |    1 +
 .../apache/hadoop/hive/ql/udf/UDFToString.java  |   10 +
 .../hadoop/hive/ql/udf/generic/GenericUDF.java  |    3 +
 .../hive/ql/udf/generic/GenericUDFDate.java     |    2 +
 .../ql/udf/generic/GenericUDFToTimestampTZ.java |   89 +
 .../ql/udf/generic/GenericUDFUnixTimeStamp.java |    9 +-
 .../ql/udf/generic/GenericUDTFGetSplits.java    |   71 +-
 .../hive/ql/exec/InputEstimatorTestClass.java   |    5 +
 .../hadoop/hive/ql/exec/TestExecDriver.java     |    3 +-
 .../exec/TestMsckCreatePartitionsInBatches.java |  340 ++
 .../hadoop/hive/ql/exec/TestOperatorNames.java  |   18 +
 .../hive/ql/io/orc/TestInputOutputFormat.java   |    1 -
 .../io/parquet/AbstractTestParquetDirect.java   |    7 +-
 .../io/parquet/TestParquetRowGroupFilter.java   |    3 +
 .../parquet/VectorizedColumnReaderTestBase.java |   13 +
 .../io/parquet/timestamp/TestNanoTimeUtils.java |   12 +-
 .../calcite/TestCBORuleFiredOnlyOnce.java       |    3 +-
 .../ql/parse/TestMacroSemanticAnalyzer.java     |    2 +-
 .../hadoop/hive/ql/parse/TestQBCompact.java     |    2 +-
 .../ql/parse/TestQBJoinTreeApplyPredicate.java  |    3 +-
 .../hadoop/hive/ql/parse/TestQBSubQuery.java    |    3 +-
 .../parse/TestReplicationSemanticAnalyzer.java  |    3 +-
 .../TestSQL11ReservedKeyWordsNegative.java      |   17 +-
 .../ql/parse/TestSemanticAnalyzerFactory.java   |    2 +-
 .../parse/TestUpdateDeleteSemanticAnalyzer.java |    2 +-
 .../TestHiveAuthorizationTaskFactory.java       |    2 +-
 .../parse/authorization/TestPrivilegesV1.java   |    4 +-
 .../parse/authorization/TestPrivilegesV2.java   |    2 +-
 .../ql/parse/repl/dump/HiveWrapperTest.java     |   18 +
 .../TestExecutionEngineWorkConcurrency.java     |  119 +
 .../hive/ql/plan/TestTezWorkConcurrency.java    |   82 -
 .../parquet_int96_alter_invalid_timezone.q      |    5 +
 .../parquet_int96_create_invalid_timezone.q     |    3 +
 .../test/queries/clientnegative/serde_regex.q   |    2 +-
 .../test/queries/clientnegative/serde_regex2.q  |    4 +-
 .../test/queries/clientnegative/serde_regex3.q  |    2 +-
 .../subquery_scalar_corr_multi_rows.q           |    3 +-
 .../clientnegative/truncate_table_failure5.q    |    5 +
 .../clientnegative/truncate_table_failure6.q    |    5 +
 .../clientpositive/alter_partition_change_col.q |    1 +
 .../clientpositive/alter_table_column_stats.q   |   68 +-
 .../test/queries/clientpositive/create_like.q   |    2 +-
 .../dynamic_semijoin_reduction_2.q              |   48 +
 .../clientpositive/explain_formatted_oid.q      |   18 -
 .../test/queries/clientpositive/jdbc_handler.q  |   23 +-
 ql/src/test/queries/clientpositive/join43.q     |   38 +-
 .../test/queries/clientpositive/msck_repair_0.q |   10 +
 .../test/queries/clientpositive/msck_repair_1.q |    8 +
 .../test/queries/clientpositive/msck_repair_2.q |    5 +
 .../test/queries/clientpositive/msck_repair_3.q |    4 +
 .../clientpositive/msck_repair_batchsize.q      |    4 +
 .../clientpositive/parquet_int96_timestamp.q    |   21 +
 .../test/queries/clientpositive/perf/query88.q  |    2 +
 .../rename_external_partition_location.q        |   11 +
 .../test/queries/clientpositive/semijoin_hint.q |   27 +-
 .../test/queries/clientpositive/serde_regex.q   |    8 +-
 ql/src/test/queries/clientpositive/sysdb.q      |  117 +
 .../test/queries/clientpositive/timestamptz.q   |   11 +
 .../test/queries/clientpositive/timestamptz_1.q |   25 +
 .../test/queries/clientpositive/timestamptz_2.q |   19 +
 ql/src/test/queries/clientpositive/transform3.q |    6 +
 .../clientpositive/vector_windowing_navfn.q     |  134 +
 .../queries/clientpositive/windowing_navfn.q    |    2 +
 .../parquet_int96_alter_invalid_timezone.q.out  |   13 +
 .../parquet_int96_create_invalid_timezone.q.out |    5 +
 .../results/clientnegative/serde_regex.q.out    |    2 +-
 .../results/clientnegative/serde_regex2.q.out   |    6 +-
 .../results/clientnegative/serde_regex3.q.out   |    2 +-
 .../subquery_scalar_corr_multi_rows.q.out       |    3 +-
 .../truncate_table_failure5.q.out               |   17 +
 .../truncate_table_failure6.q.out               |   17 +
 .../alter_partition_change_col.q.out            |   20 +
 .../alter_table_column_stats.q.out              |  486 ++-
 .../clientpositive/beeline/mapjoin2.q.out       |   91 +
 .../materialized_view_create_rewrite.q.out      |  322 ++
 .../beeline/udf_unix_timestamp.q.out            |  117 +
 .../results/clientpositive/create_like.q.out    |    4 +-
 .../clientpositive/explain_formatted_oid.q.out  |   38 -
 ql/src/test/results/clientpositive/join0.q.out  |    2 +-
 ql/src/test/results/clientpositive/join43.q.out |   76 +-
 .../clientpositive/llap/auto_join0.q.out        |   15 +-
 .../clientpositive/llap/auto_join30.q.out       |   67 +-
 .../llap/auto_sortmerge_join_9.q.out            |   70 +-
 .../llap/bucket_map_join_tez1.q.out             |   11 +-
 .../llap/correlationoptimizer2.q.out            |   26 +-
 .../llap/correlationoptimizer3.q.out            |  188 +-
 .../llap/correlationoptimizer6.q.out            |  104 +-
 .../llap/dynamic_partition_pruning.q.out        |   92 +-
 .../llap/dynamic_semijoin_reduction.q.out       |    4 +-
 .../llap/dynamic_semijoin_reduction_2.q.out     |  309 +-
 .../llap/dynamic_semijoin_user_level.q.out      |    2 +-
 .../clientpositive/llap/except_distinct.q.out   |   92 +-
 .../clientpositive/llap/explainuser_1.q.out     |  137 +-
 .../clientpositive/llap/explainuser_2.q.out     |  772 ++--
 .../clientpositive/llap/intersect_merge.q.out   |   83 +-
 .../clientpositive/llap/jdbc_handler.q.out      |   42 +-
 .../results/clientpositive/llap/join46.q.out    |   38 +-
 .../llap/limit_join_transpose.q.out             |  242 +-
 .../clientpositive/llap/limit_pushdown.q.out    |   17 +-
 .../clientpositive/llap/llap_nullscan.q.out     |   15 +-
 .../llap/llap_vector_nohybridgrace.q.out        |    4 +-
 .../results/clientpositive/llap/mapjoin46.q.out |   43 +-
 .../test/results/clientpositive/llap/mrr.q.out  |   82 +-
 .../clientpositive/llap/multiMapJoin2.q.out     |  209 +-
 .../llap/offset_limit_ppd_optimizer.q.out       |   17 +-
 .../clientpositive/llap/semijoin_hint.q.out     |  185 +-
 .../clientpositive/llap/subquery_in.q.out       |  173 +-
 .../clientpositive/llap/subquery_multi.q.out    |  617 ++--
 .../clientpositive/llap/subquery_notin.q.out    | 1539 ++++----
 .../clientpositive/llap/subquery_null_agg.q.out |   26 +-
 .../clientpositive/llap/subquery_scalar.q.out   | 2724 +++-----------
 .../clientpositive/llap/subquery_select.q.out   | 3357 ++++-------------
 .../clientpositive/llap/subquery_views.q.out    |  274 +-
 .../results/clientpositive/llap/sysdb.q.out     | 3447 ++++++++++++++++++
 .../clientpositive/llap/tez_join_tests.q.out    |   29 +-
 .../clientpositive/llap/tez_joins_explain.q.out |   29 +-
 .../clientpositive/llap/unionDistinct_1.q.out   |  261 +-
 .../clientpositive/llap/union_top_level.q.out   |   50 +-
 .../llap/vector_aggregate_without_gby.q.out     |   12 +-
 .../llap/vector_auto_smb_mapjoin_14.q.out       |    2 +-
 .../llap/vector_between_columns.q.out           |    4 +-
 .../clientpositive/llap/vector_between_in.q.out |    6 +-
 .../llap/vector_binary_join_groupby.q.out       |    2 +-
 .../clientpositive/llap/vector_bround.q.out     |    4 +-
 .../llap/vector_char_simple.q.out               |    2 +-
 .../clientpositive/llap/vector_coalesce.q.out   |    4 +-
 .../llap/vector_count_distinct.q.out            |    2 +-
 .../llap/vector_empty_where.q.out               |    8 +-
 .../llap/vector_groupby_grouping_id3.q.out      |    4 +
 .../llap/vector_groupby_grouping_sets4.q.out    |   43 +-
 .../llap/vector_groupby_mapjoin.q.out           |   30 +-
 .../clientpositive/llap/vector_inner_join.q.out |   18 +
 .../clientpositive/llap/vector_join30.q.out     |  130 +-
 .../llap/vector_join_part_col_char.q.out        |   18 +-
 .../llap/vector_leftsemi_mapjoin.q.out          |  282 ++
 .../clientpositive/llap/vector_order_null.q.out |   22 +
 .../llap/vector_outer_join0.q.out               |    4 +
 .../llap/vector_outer_join1.q.out               |   12 +-
 .../llap/vector_outer_join2.q.out               |    8 +-
 .../llap/vector_partition_diff_num_cols.q.out   |   10 +-
 .../llap/vector_ptf_part_simple.q.out           |   39 +
 .../llap/vector_tablesample_rows.q.out          |    2 +-
 .../llap/vector_varchar_simple.q.out            |    2 +-
 .../llap/vector_windowing_navfn.q.out           | 2113 +++++++++++
 .../clientpositive/llap/vectorization_0.q.out   |   12 +-
 .../llap/vectorization_limit.q.out              |   11 +
 .../llap/vectorization_short_regress.q.out      |   16 +-
 .../clientpositive/llap/vectorized_case.q.out   |    4 +-
 .../llap/vectorized_date_funcs.q.out            |    2 +-
 .../vectorized_dynamic_partition_pruning.q.out  |  108 +-
 .../vectorized_dynamic_semijoin_reduction.q.out |   32 +-
 .../llap/vectorized_mapjoin2.q.out              |    2 +-
 .../clientpositive/llap/vectorized_ptf.q.out    |   64 +
 .../llap/vectorized_timestamp.q.out             |   22 +-
 .../llap/vectorized_timestamp_funcs.q.out       |    2 +-
 .../results/clientpositive/msck_repair_0.q.out  |   38 +-
 .../results/clientpositive/msck_repair_1.q.out  |   28 +-
 .../results/clientpositive/msck_repair_2.q.out  |   28 +-
 .../results/clientpositive/msck_repair_3.q.out  |   28 +-
 .../clientpositive/msck_repair_batchsize.q.out  |   34 +-
 .../results/clientpositive/parallel_join0.q.out |    2 +-
 .../parquet_int96_timestamp.q.out               |  191 +-
 .../results/clientpositive/perf/query1.q.out    |  248 +-
 .../results/clientpositive/perf/query14.q.out   | 2032 +++++------
 .../results/clientpositive/perf/query16.q.out   |   27 +-
 .../results/clientpositive/perf/query17.q.out   |   68 +-
 .../results/clientpositive/perf/query23.q.out   |  477 ++-
 .../results/clientpositive/perf/query25.q.out   |   68 +-
 .../results/clientpositive/perf/query28.q.out   |   87 +-
 .../results/clientpositive/perf/query29.q.out   |   19 +-
 .../results/clientpositive/perf/query30.q.out   |  327 +-
 .../results/clientpositive/perf/query31.q.out   |  270 +-
 .../results/clientpositive/perf/query32.q.out   |   34 +-
 .../results/clientpositive/perf/query33.q.out   |  252 +-
 .../results/clientpositive/perf/query38.q.out   |   94 +-
 .../results/clientpositive/perf/query39.q.out   |   58 +-
 .../results/clientpositive/perf/query46.q.out   |   75 +-
 .../results/clientpositive/perf/query5.q.out    |   70 +-
 .../results/clientpositive/perf/query51.q.out   |   83 +-
 .../results/clientpositive/perf/query56.q.out   |  252 +-
 .../results/clientpositive/perf/query58.q.out   |  120 +-
 .../results/clientpositive/perf/query6.q.out    |  348 +-
 .../results/clientpositive/perf/query60.q.out   |  252 +-
 .../results/clientpositive/perf/query64.q.out   |  443 ++-
 .../results/clientpositive/perf/query65.q.out   |   86 +-
 .../results/clientpositive/perf/query66.q.out   |   90 +-
 .../results/clientpositive/perf/query68.q.out   |   75 +-
 .../results/clientpositive/perf/query69.q.out   |   76 +-
 .../results/clientpositive/perf/query70.q.out   |   44 +-
 .../results/clientpositive/perf/query75.q.out   |  258 +-
 .../results/clientpositive/perf/query76.q.out   |  126 +-
 .../results/clientpositive/perf/query80.q.out   |  106 +-
 .../results/clientpositive/perf/query81.q.out   |  320 +-
 .../results/clientpositive/perf/query83.q.out   |  225 +-
 .../results/clientpositive/perf/query85.q.out   |   11 +-
 .../results/clientpositive/perf/query87.q.out   |   94 +-
 .../results/clientpositive/perf/query88.q.out   |  358 +-
 .../results/clientpositive/perf/query9.q.out    | 1009 ++---
 .../results/clientpositive/perf/query90.q.out   |   58 +-
 .../results/clientpositive/perf/query92.q.out   |   35 +-
 .../results/clientpositive/perf/query95.q.out   |   70 +-
 .../results/clientpositive/perf/query97.q.out   |   35 +-
 .../rename_external_partition_location.q.out    |  208 ++
 ql/src/test/results/clientpositive/repair.q.out |    3 +-
 .../results/clientpositive/serde_regex.q.out    |   16 +-
 .../spark/parquet_int96_timestamp.q.out         |  718 ++++
 .../spark/vector_between_in.q.out               |    6 +-
 .../spark/vector_count_distinct.q.out           |    2 +-
 .../spark/vector_outer_join1.q.out              |    4 +-
 .../spark/vector_outer_join2.q.out              |    4 +-
 .../clientpositive/spark/vectorization_0.q.out  |   12 +-
 .../spark/vectorization_short_regress.q.out     |   16 +-
 .../clientpositive/spark/vectorized_case.q.out  |    4 +-
 .../clientpositive/spark/vectorized_ptf.q.out   |   75 +
 .../spark/vectorized_timestamp_funcs.q.out      |    2 +-
 .../clientpositive/tez/explainanalyze_2.q.out   |  412 +--
 .../clientpositive/tez/explainuser_3.q.out      |   43 +-
 .../tez/vector_join_part_col_char.q.out         |   18 +-
 .../tez/vectorization_limit.q.out               |   11 +
 .../results/clientpositive/timestamptz.q.out    |  124 +
 .../results/clientpositive/timestamptz_1.q.out  |  156 +
 .../results/clientpositive/timestamptz_2.q.out  |   78 +
 .../results/clientpositive/transform3.q.out     |   28 +
 .../clientpositive/vector_outer_join3.q.out     |    6 +-
 .../clientpositive/vector_outer_join4.q.out     |    6 +-
 .../clientpositive/vector_outer_join6.q.out     |    4 +-
 .../clientpositive/windowing_navfn.q.out        |   62 +
 serde/if/serde.thrift                           |    2 +
 .../src/gen/thrift/gen-cpp/serde_constants.cpp  |    3 +
 serde/src/gen/thrift/gen-cpp/serde_constants.h  |    1 +
 .../hadoop/hive/serde/serdeConstants.java       |    3 +
 .../org/apache/hadoop/hive/serde/Types.php      |    6 +
 .../org_apache_hadoop_hive_serde/constants.py   |    2 +
 serde/src/gen/thrift/gen-rb/serde_constants.rb  |    3 +
 .../hadoop/hive/serde2/AbstractSerDe.java       |    2 +-
 .../hive/serde2/ColumnProjectionUtils.java      |    8 +-
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |    9 +-
 .../serde2/avro/AvroLazyObjectInspector.java    |    2 +-
 .../hive/serde2/avro/AvroSchemaRetriever.java   |    2 +-
 .../binarysortable/BinarySortableSerDe.java     |   26 +-
 .../hive/serde2/columnar/ColumnarSerDe.java     |    4 +-
 .../serde2/columnar/ColumnarStructBase.java     |    2 +-
 .../dynamic_type/DynamicSerDeStructBase.java    |    2 +-
 .../serde2/dynamic_type/ParseException.java     |    2 +-
 .../hive/serde2/fast/DeserializeRead.java       |    6 +-
 .../hive/serde2/io/TimestampTZWritable.java     |  427 +++
 .../hive/serde2/io/TimestampWritable.java       |    6 +-
 .../hadoop/hive/serde2/lazy/LazyDate.java       |    2 +-
 .../hadoop/hive/serde2/lazy/LazyFactory.java    |   24 +-
 .../hive/serde2/lazy/LazyHiveDecimal.java       |    2 +-
 .../hive/serde2/lazy/LazySimpleSerDe.java       |   12 +-
 .../hive/serde2/lazy/LazyTimestampTZ.java       |   91 +
 .../hadoop/hive/serde2/lazy/LazyUtils.java      |   11 +-
 .../LazyPrimitiveObjectInspectorFactory.java    |    4 +
 .../LazyTimestampTZObjectInspector.java         |   43 +
 .../hive/serde2/lazybinary/LazyBinaryArray.java |    2 +-
 .../serde2/lazybinary/LazyBinaryFactory.java    |    3 +
 .../hive/serde2/lazybinary/LazyBinaryMap.java   |    2 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |   13 +-
 .../serde2/lazybinary/LazyBinaryString.java     |    2 +-
 .../serde2/lazybinary/LazyBinaryStruct.java     |    2 +-
 .../lazybinary/LazyBinaryTimestampTZ.java       |   36 +
 .../hive/serde2/lazybinary/LazyBinaryUnion.java |    2 +-
 .../hive/serde2/lazybinary/LazyBinaryUtils.java |    7 +-
 .../serde2/objectinspector/ObjectInspector.java |    2 +-
 .../ObjectInspectorConverters.java              |    4 +
 .../objectinspector/ObjectInspectorUtils.java   |   23 +-
 .../PrimitiveObjectInspector.java               |    4 +-
 .../primitive/JavaBinaryObjectInspector.java    |   18 +-
 .../JavaTimestampTZObjectInspector.java         |   76 +
 .../PrimitiveObjectInspectorConverter.java      |   26 +
 .../PrimitiveObjectInspectorFactory.java        |   20 +-
 .../PrimitiveObjectInspectorUtils.java          |  104 +-
 .../SettableTimestampTZObjectInspector.java     |   34 +
 .../primitive/TimestampTZObjectInspector.java   |   29 +
 ...tableConstantTimestampTZObjectInspector.java |   36 +
 .../WritableTimestampTZObjectInspector.java     |   79 +
 .../apache/hadoop/hive/serde2/thrift/Type.java  |    8 +
 .../hive/serde2/typeinfo/TypeInfoFactory.java   |    3 +
 .../hive/serde2/typeinfo/TypeInfoUtils.java     |    4 +-
 .../hadoop/hive/serde2/TestOpenCSVSerde.java    |   24 +-
 .../hive/serde2/io/TestTimestampTZWritable.java |  102 +
 service-rpc/if/TCLIService.thrift               |   10 +-
 .../thrift/gen-cpp/TCLIService_constants.cpp    |    2 +
 .../gen/thrift/gen-cpp/TCLIService_types.cpp    |   16 +-
 .../src/gen/thrift/gen-cpp/TCLIService_types.h  |    6 +-
 .../rpc/thrift/TCLIServiceConstants.java        |    2 +
 .../service/rpc/thrift/TProtocolVersion.java    |    5 +-
 .../apache/hive/service/rpc/thrift/TTypeId.java |    5 +-
 service-rpc/src/gen/thrift/gen-php/Types.php    |    6 +
 .../gen/thrift/gen-py/TCLIService/constants.py  |    2 +
 .../src/gen/thrift/gen-py/TCLIService/ttypes.py |    6 +
 .../thrift/gen-rb/t_c_l_i_service_constants.rb  |    2 +
 .../gen/thrift/gen-rb/t_c_l_i_service_types.rb  |   10 +-
 .../hive/service/auth/HiveAuthConstants.java    |   43 +
 .../hive/service/auth/HiveAuthFactory.java      |   52 +-
 .../apache/hive/service/cli/ColumnValue.java    |   11 +
 .../apache/hive/service/cli/TypeDescriptor.java |    2 +
 .../hive/service/cli/operation/Operation.java   |   12 +-
 .../service/cli/operation/SQLOperation.java     |    6 +-
 .../service/cli/thrift/ThriftCLIService.java    |    7 +-
 .../service/cli/thrift/ThriftHttpServlet.java   |    7 +-
 .../cli/TestRetryingThriftCLIServiceClient.java |    4 +-
 .../service/cli/session/TestSessionHooks.java   |    4 +-
 .../thrift/ThriftCliServiceTestWithCookie.java  |    5 +-
 .../apache/hadoop/hive/shims/HadoopShims.java   |    5 +-
 .../hive/thrift/HadoopThriftAuthBridge.java     |    4 +-
 .../TokenStoreDelegationTokenSecretManager.java |    2 +-
 .../hive/spark/client/SparkClientFactory.java   |   11 +-
 .../hive/common/type/FastHiveDecimalImpl.java   |    2 +-
 583 files changed, 31410 insertions(+), 18317 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/if/hive_metastore.thrift
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --cc metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 504946a,d9d50ab..e44cb9b
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@@ -6900,16 -6971,15 +6900,26 @@@ public class HiveMetaStore extends Thri
        return new ForeignKeysResponse(ret);
      }
  
 +    private void throwMetaException(Exception e) throws MetaException,
 +        NoSuchObjectException {
 +      if (e instanceof MetaException) {
 +        throw (MetaException) e;
 +      } else if (e instanceof NoSuchObjectException) {
 +        throw (NoSuchObjectException) e;
 +      } else {
 +        throw newMetaException(e);
 +      }
 +    }
++
+     @Override
+     public String get_metastore_db_uuid() throws MetaException, TException {
+       try {
+         return getMS().getMetastoreDbUuid();
+       } catch (MetaException e) {
+         LOG.error("Exception thrown while querying metastore db uuid", e);
+         throw e;
+       }
+     }
    }
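[For reference, the new get_metastore_db_uuid() endpoint above simply delegates to the RawStore. The minimal client-side sketch below assumes the IMetaStoreClient/HiveMetaStoreClient wrapper added in this merge is named getMetastoreDbUuid(); that wrapper is not shown in this excerpt, so the name is an assumption.]

    // Minimal sketch, assuming the client wrapper is called getMetastoreDbUuid();
    // the underlying thrift call is get_metastore_db_uuid() as shown in the diff above.
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

    public class MetastoreDbUuidExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
          // Assumed wrapper name for the new thrift endpoint.
          String uuid = client.getMetastoreDbUuid();
          System.out.println("metastore db uuid: " + uuid);
        } finally {
          client.close();
        }
      }
    }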
  
  

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --cc metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 35d876a,b28983f..344eb19
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@@ -25,8 -25,8 +25,10 @@@ import java.lang.reflect.Field
  import java.net.InetAddress;
  import java.net.URI;
  import java.nio.ByteBuffer;
 +import java.sql.Connection;
 +import java.sql.SQLException;
+ import java.time.LocalDateTime;
+ import java.time.format.DateTimeFormatter;
  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Collection;

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/model/package.jdo
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index a2186cc,44655af..733595c
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@@ -257,8 -254,8 +259,9 @@@ import org.slf4j.Logger
  import org.slf4j.LoggerFactory;
  import org.stringtemplate.v4.ST;
  
+ import com.google.common.annotations.VisibleForTesting;
  import com.google.common.collect.Iterables;
 +import com.google.common.collect.Lists;
  
  /**
   * DDLTask implementation.
@@@ -3982,211 -4001,7 +3994,215 @@@ public class DDLTask extends Task<DDLWo
        throw new HiveException(ErrorMsg.UNSUPPORTED_ALTER_TBL_OP, alterTbl.getOp().toString());
      }
  
 -    return 0;
 +    return null;
 +  }
 +
 +  private List<Task<?>> alterTableDropProps(AlterTableDesc alterTbl, Table tbl,
 +      Partition part, EnvironmentContext environmentContext) throws HiveException {
 +    if (StatsSetupConst.USER.equals(environmentContext.getProperties()
 +        .get(StatsSetupConst.STATS_GENERATED))) {
 +      // drop a stats parameter, which triggers recompute stats update automatically
 +      environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
 +    }
 +
 +    List<Task<?>> result = null;
 +    if (part == null) {
 +      Set<String> removedSet = alterTbl.getProps().keySet();
 +      boolean isFromMmTable = MetaStoreUtils.isInsertOnlyTable(tbl.getParameters()),
 +          isRemoved = MetaStoreUtils.isRemovedInsertOnlyTable(removedSet);
 +      if (isFromMmTable && isRemoved) {
 +        result = generateRemoveMmTasks(tbl);
 +      }
 +    }
 +    Iterator<String> keyItr = alterTbl.getProps().keySet().iterator();
 +    while (keyItr.hasNext()) {
 +      if (part != null) {
 +        part.getTPartition().getParameters().remove(keyItr.next());
 +      } else {
 +        tbl.getTTable().getParameters().remove(keyItr.next());
 +      }
 +    }
 +    return result;
 +  }
 +
 +  private List<Task<?>> generateRemoveMmTasks(Table tbl) throws HiveException {
 +    // To avoid confusion from nested MM directories when table is converted back and forth, we
 +    // want to rename mm_ dirs to remove the prefix; however, given the unpredictable nested
 +    // directory handling in Hive/MR, we will instead move all the files into the root directory.
 +    // We will also delete any directories that are not committed. 
 +    // Note that this relies on locks. Note also that we only do the renames AFTER the metastore
 +    // operation commits. Deleting uncommitted things is safe, but moving stuff before we convert
 +    // could cause data loss.
 +    List<Path> allMmDirs = new ArrayList<>();
 +    if (tbl.isStoredAsSubDirectories()) {
 +      // TODO: support this? we only bail because it's a PITA and hardly anyone seems to care.
 +      throw new HiveException("Converting list bucketed tables stored as subdirectories "
 +          + " to and from MM is not supported");
 +    }
 +    List<String> bucketCols = tbl.getBucketCols();
 +    if (bucketCols != null && !bucketCols.isEmpty()
 +        && HiveConf.getBoolVar(conf, ConfVars.HIVE_STRICT_CHECKS_BUCKETING)) {
 +      throw new HiveException("Converting bucketed tables from MM is not supported by default; "
 +          + "copying files from multiple MM directories may potentially break the buckets. You "
 +          + "can set " + ConfVars.HIVE_STRICT_CHECKS_BUCKETING.varname
 +          + " to false for this query if you want to force the conversion.");
 +    }
 +    Hive db = getHive();
 +    String value = conf.get(ValidTxnList.VALID_TXNS_KEY);
 +    ValidTxnList validTxnList = value == null ? new ValidReadTxnList() : new ValidReadTxnList(value);
 +    if (tbl.getPartitionKeys().size() > 0) {
 +      PartitionIterable parts = new PartitionIterable(db, tbl, null,
 +          HiveConf.getIntVar(conf, ConfVars.METASTORE_BATCH_RETRIEVE_MAX));
 +      Iterator<Partition> partIter = parts.iterator();
 +      while (partIter.hasNext()) {
 +        Partition part = partIter.next();
 +        checkMmLb(part);
 +        handleRemoveMm(part.getDataLocation(), validTxnList, allMmDirs);
 +      }
 +    } else {
 +      checkMmLb(tbl);
 +      handleRemoveMm(tbl.getDataLocation(), validTxnList, allMmDirs);
 +    }
 +    List<Path> targetPaths = new ArrayList<>(allMmDirs.size());
 +    List<String> targetPrefix = new ArrayList<>(allMmDirs.size());
 +    int prefixLen = JavaUtils.DELTA_PREFIX.length();
 +    for (int i = 0; i < allMmDirs.size(); ++i) {
 +      Path src = allMmDirs.get(i);
 +      Path tgt = src.getParent();
 +      String prefix = src.getName().substring(prefixLen + 1) + "_";
 +      Utilities.LOG14535.info("Will move " + src + " to " + tgt + " (prefix " + prefix + ")");
 +      targetPaths.add(tgt);
 +      targetPrefix.add(prefix);
 +    }
 +    // Don't set inputs and outputs - the locks have already been taken so it's pointless.
 +    MoveWork mw = new MoveWork(null, null, null, null, false);
 +    mw.setMultiFilesDesc(new LoadMultiFilesDesc(
 +        allMmDirs, targetPaths, targetPrefix, true, null, null));
 +    return Lists.<Task<?>>newArrayList(TaskFactory.get(mw, conf));
 +  }
 +
 +  private void checkMmLb(Table tbl) throws HiveException {
 +    if (!tbl.isStoredAsSubDirectories()) return;
 +    // TODO: support this?
 +    throw new HiveException("Converting list bucketed tables stored as subdirectories "
 +        + " to and from MM is not supported");
 +  }
 +
 +  private void checkMmLb(Partition part) throws HiveException {
 +    if (!part.isStoredAsSubDirectories()) return;
 +    // TODO: support this?
 +    throw new HiveException("Converting list bucketed tables stored as subdirectories "
 +        + " to and from MM is not supported. Please create a table in the desired format.");
 +  }
 +
 +  private void handleRemoveMm(
 +      Path path, ValidTxnList validTxnList, List<Path> result) throws HiveException {
 +    // Note: doesn't take LB into account; that is not presently supported here (throws above).
 +    try {
 +      FileSystem fs = path.getFileSystem(conf);
 +      for (FileStatus file : fs.listStatus(path)) {
 +        Path childPath = file.getPath();
 +        if (!file.isDirectory()) {
 +          ensureDelete(fs, childPath, "a non-directory file");
 +          continue;
 +        }
 +        Long writeId = JavaUtils.extractTxnId(childPath);
 +        if (writeId == null) {
 +          ensureDelete(fs, childPath, "an unknown directory");
 +        } else if (!validTxnList.isTxnValid(writeId)) {
 +          // Assume no concurrent active writes - we rely on locks here. We could check and fail.
 +          ensureDelete(fs, childPath, "an uncommitted directory");
 +        } else {
 +          result.add(childPath);
 +        }
 +      }
 +    } catch (IOException ex) {
 +      throw new HiveException(ex);
 +    }
 +  }
 +
 +  private static void ensureDelete(FileSystem fs, Path path, String what) throws IOException {
 +    Utilities.LOG14535.info("Deleting " + what + " " + path);
 +    try {
 +      if (!fs.delete(path, true)) throw new IOException("delete returned false");
 +    } catch (Exception ex) {
 +      String error = "Couldn't delete " + path + "; cannot remove MM setting from the table";
 +      LOG.error(error, ex);
 +      throw (ex instanceof IOException) ? (IOException)ex : new IOException(ex);
 +    }
 +  }
 +
 +  private List<Task<?>> generateAddMmTasks(Table tbl) throws HiveException {
 +    // We will move all the files in the table/partition directories into the first MM
 +    // directory, then commit the first write ID.
 +    List<Path> srcs = new ArrayList<>(), tgts = new ArrayList<>();
 +    long mmWriteId = 0;
 +    try {
 +      HiveTxnManager txnManager = SessionState.get().getTxnMgr();
 +      mmWriteId = txnManager.openTxn(new Context(conf), conf.getUser());
 +      txnManager.commitTxn();
 +    } catch (Exception e) {
 +      String errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
 +      console.printError(errorMessage, "\n"
 +          + org.apache.hadoop.util.StringUtils.stringifyException(e));
 +    }
 +    int stmtId = 0;
 +    String mmDir = AcidUtils.deltaSubdir(mmWriteId, mmWriteId, stmtId);
 +    Hive db = getHive();
 +    if (tbl.getPartitionKeys().size() > 0) {
 +      PartitionIterable parts = new PartitionIterable(db, tbl, null,
 +          HiveConf.getIntVar(conf, ConfVars.METASTORE_BATCH_RETRIEVE_MAX));
 +      Iterator<Partition> partIter = parts.iterator();
 +      while (partIter.hasNext()) {
 +        Partition part = partIter.next();
 +        checkMmLb(part);
 +        Path src = part.getDataLocation(), tgt = new Path(src, mmDir);
 +        srcs.add(src);
 +        tgts.add(tgt);
 +        Utilities.LOG14535.info("Will move " + src + " to " + tgt);
 +      }
 +    } else {
 +      checkMmLb(tbl);
 +      Path src = tbl.getDataLocation(), tgt = new Path(src, mmDir);
 +      srcs.add(src);
 +      tgts.add(tgt);
 +      Utilities.LOG14535.info("Will move " + src + " to " + tgt);
 +    }
 +    // Don't set inputs and outputs - the locks have already been taken so it's pointless.
 +    MoveWork mw = new MoveWork(null, null, null, null, false);
 +    mw.setMultiFilesDesc(new LoadMultiFilesDesc(srcs, tgts, true, null, null));
 +    ImportCommitWork icw = new ImportCommitWork(tbl.getDbName(), tbl.getTableName(), mmWriteId, stmtId);
 +    Task<?> mv = TaskFactory.get(mw, conf), ic = TaskFactory.get(icw, conf);
 +    mv.addDependentTask(ic);
 +    return Lists.<Task<?>>newArrayList(mv);
 +  }
 +
 +  private List<Task<?>> alterTableAddProps(AlterTableDesc alterTbl, Table tbl,
 +      Partition part, EnvironmentContext environmentContext) throws HiveException {
 +    if (StatsSetupConst.USER.equals(environmentContext.getProperties()
 +        .get(StatsSetupConst.STATS_GENERATED))) {
 +      environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS);
 +    }
++    if(alterTbl.getProps().containsKey(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY)) {
++      NanoTimeUtils.validateTimeZone(
++          alterTbl.getProps().get(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY));
++    }
 +    List<Task<?>> result = null;
 +    if (part != null) {
 +      part.getTPartition().getParameters().putAll(alterTbl.getProps());
 +    } else {
 +      boolean isFromMmTable = MetaStoreUtils.isInsertOnlyTable(tbl.getParameters());
 +      Boolean isToMmTable = MetaStoreUtils.isToInsertOnlyTable(alterTbl.getProps());
 +      if (isToMmTable != null) {
 +        if (!isFromMmTable && isToMmTable) {
 +          result = generateAddMmTasks(tbl);
 +        } else if (isFromMmTable && !isToMmTable) {
 +          result = generateRemoveMmTasks(tbl);
 +        }
 +      }
 +      tbl.getTTable().getParameters().putAll(alterTbl.getProps());
 +    }
 +    return result;
    }
  
     private int dropConstraint(Hive db, AlterTableDesc alterTbl)
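
For orientation on the MM-conversion hunks above: both generateAddMmTasks() and generateRemoveMmTasks() hinge on the ACID delta directory naming produced by AcidUtils.deltaSubdir() and recognised via JavaUtils.DELTA_PREFIX. The following is a minimal, self-contained sketch of that layout only; the zero-padding widths are assumptions chosen to mirror Hive's delta_<writeId>_<writeId>_<stmtId> convention and are not quoted from this patch.

// Illustrative sketch only: mimics the directory names generateAddMmTasks() creates
// and the file prefix generateRemoveMmTasks() derives when moving files back out.
public final class DeltaDirSketch {
  private static final String DELTA_PREFIX = "delta"; // assumed to match JavaUtils.DELTA_PREFIX

  static String deltaSubdir(long minWriteId, long maxWriteId, int stmtId) {
    // Padding widths (%07d / %04d) are assumed for illustration.
    return String.format("%s_%07d_%07d_%04d", DELTA_PREFIX, minWriteId, maxWriteId, stmtId);
  }

  public static void main(String[] args) {
    String dir = deltaSubdir(1, 1, 0);                              // e.g. delta_0000001_0000001_0000
    // The remove-MM path keeps everything after "delta_" as a file-name prefix:
    String prefix = dir.substring(DELTA_PREFIX.length() + 1) + "_"; // e.g. 0000001_0000001_0000_
    System.out.println(dir + " -> " + prefix);
  }
}
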

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index 40330fa,a575cdd..b0a731e
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@@ -377,11 -371,10 +380,14 @@@ public class FetchOperator implements S
  
        Class<? extends InputFormat> formatter = currDesc.getInputFileFormatClass();
        Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
+       if (ParquetHiveSerDe.class.getName().equals(currDesc.getTableDesc().getSerdeClassName())) {
+         ParquetTableUtils.setParquetTimeZoneIfAbsent(job, currDesc.getTableDesc().getProperties());
+       }
        InputFormat inputFormat = getInputFormatFromCache(formatter, job);
 +      String inputs = processCurrPathForMmWriteIds(inputFormat);
 +      Utilities.LOG14535.info("Setting fetch inputs to " + inputs);
 +      if (inputs == null) return null;
 +      job.set("mapred.input.dir", inputs);
  
        InputSplit[] splits = inputFormat.getSplits(job, 1);
        FetchInputFormatSplit[] inputSplits = new FetchInputFormatSplit[splits.length];

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 03c50a7,ebf1344..b755e2d
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@@ -201,7 -145,8 +201,8 @@@ import org.apache.hadoop.mapred.RecordR
  import org.apache.hadoop.mapred.Reporter;
  import org.apache.hadoop.mapred.SequenceFileInputFormat;
  import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 -import org.apache.hadoop.security.Credentials;
 +import org.apache.hadoop.mapred.TextInputFormat;
+ import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.util.Progressable;
  import org.apache.hive.common.util.ACLConfigurationParser;
  import org.apache.hive.common.util.ReflectionUtil;

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 8bcf8c7,21394c6..98ea141
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@@ -422,13 -421,13 +426,17 @@@ public class HiveInputFormat<K extends 
     */
    private void addSplitsForGroup(List<Path> dirs, TableScanOperator tableScan, JobConf conf,
        InputFormat inputFormat, Class<? extends InputFormat> inputFormatClass, int splits,
 -      TableDesc table, List<InputSplit> result) throws IOException {
 +      TableDesc table, List<InputSplit> result)
 +          throws IOException {
 +    String txnString = conf.get(ValidTxnList.VALID_TXNS_KEY);
 +    ValidTxnList validTxnList = txnString == null ? new ValidReadTxnList() :
 +        new ValidReadTxnList(txnString);
  
-     Utilities.copyTablePropertiesToConf(table, conf);
+     try {
+       Utilities.copyTablePropertiesToConf(table, conf);
+     } catch (HiveException e) {
+       throw new IOException(e);
+     }
  
      if (tableScan != null) {
        pushFilters(conf, tableScan);

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
index f4fadbb,aeccfa5..a70fde6
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
@@@ -78,4 -89,40 +89,40 @@@ public class MapredParquetInputFormat e
        throw new RuntimeException("Cannot create a RecordReaderWrapper", e);
      }
    }
+ 
+   /**
+    * Tries to find the table belonging to the file path of the split.
+    * If the table can be determined, the parquet timezone property will be propagated
+    * to the job configuration to be used during reading.
+    * If the table cannot be determined, then do nothing.
+    * @param split file split being read
+    * @param job configuration to set the timezone property on
+    */
+   private void propagateParquetTimeZoneTablePorperty(FileSplit split, JobConf job) {
+     PartitionDesc part = null;
+     Path filePath = split.getPath();
+     try {
+       MapWork mapWork = Utilities.getMapWork(job);
+       if(mapWork != null) {
+         LOG.debug("Trying to find partition in MapWork for path " + filePath);
+         Map<Path, PartitionDesc> pathToPartitionInfo = mapWork.getPathToPartitionInfo();
+ 
+         part = HiveFileFormatUtils
 -            .getPartitionDescFromPathRecursively(pathToPartitionInfo, filePath, null);
++            .getFromPathRecursively(pathToPartitionInfo, filePath, null);
+         LOG.debug("Partition found " + part);
+       }
+     } catch (AssertionError ae) {
+       LOG.warn("Cannot get partition description from " + filePath
+           + " because " + ae.getMessage());
+       part = null;
+     } catch (Exception e) {
+       LOG.warn("Cannot get partition description from " + filePath
+           + " because " + e.getMessage());
+       part = null;
+     }
+ 
+     if (part != null && part.getTableDesc() != null) {
+       ParquetTableUtils.setParquetTimeZoneIfAbsent(job, part.getTableDesc().getProperties());
+     }
+   }
  }
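
The propagation added above amounts to a "copy the table-level timezone property into the job conf only if the job conf does not already carry it" step. A minimal sketch of that set-if-absent pattern follows, using only the plain Hadoop Configuration API; the property key string is a hypothetical placeholder, not the actual value of ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY.

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;

// Sketch of the behaviour assumed for ParquetTableUtils.setParquetTimeZoneIfAbsent().
public final class TimeZonePropagationSketch {
  private static final String KEY = "parquet.int96.write.zone"; // placeholder key, assumption

  static void setIfAbsent(Configuration jobConf, Properties tableProps) {
    if (jobConf.get(KEY) == null) {            // job conf wins if already set
      String tz = tableProps.getProperty(KEY); // otherwise fall back to the table property
      if (tz != null) {
        jobConf.set(KEY, tz);
      }
    }
  }
}
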

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
index 8c61568,42f9b66..b0d22bb
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
@@@ -184,12 -181,16 +184,17 @@@ public class ProjectionPusher 
        throws IOException {
      updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
      final JobConf cloneJobConf = new JobConf(jobConf);
 -    final PartitionDesc part = pathToPartitionInfo.get(path);
 +    final PartitionDesc part = HiveFileFormatUtils.getFromPathRecursively(
 +        pathToPartitionInfo, path, null, false, true);
  
-     if ((part != null) && (part.getTableDesc() != null)) {
-       Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
+     try {
+       if ((part != null) && (part.getTableDesc() != null)) {
+         Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
+       }
+     } catch (Exception e) {
+       throw new IOException(e);
      }
+ 
      pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
      return cloneJobConf;
    }

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --cc ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 5401c7b,ad921f3..151da11
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@@ -344,8 -344,9 +344,9 @@@ public class PartialScanTask extends Ta
        }
      }
  
-     QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
+     QueryState queryState =
+         new QueryState.Builder().withHiveConf(new HiveConf(conf, PartialScanTask.class)).build();
 -    PartialScanWork mergeWork = new PartialScanWork(inputPaths);
 +    PartialScanWork mergeWork = new PartialScanWork(inputPaths, null);
      DriverContext driverCxt = new DriverContext();
      PartialScanTask taskExec = new PartialScanTask();
      taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hive/blob/08edf03f/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
----------------------------------------------------------------------
diff --cc ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 42cb5a6,adcff44..8d876c7
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@@ -509,19 -509,11 +509,16 @@@ Stage-
                    Conditional Operator
                      Stage-1
                        Map 1 vectorized
- <<<<<<< HEAD
 -                      File Output Operator [FS_10]
 +                      File Output Operator [FS_8]
                          table:{"name:":"default.orc_merge5"}
 -                        Select Operator [SEL_9] (rows=306 width=268)
 +                        Select Operator [SEL_7] (rows=306 width=268)
                            Output:["_col0","_col1","_col2","_col3","_col4"]
 -                          Filter Operator [FIL_8] (rows=306 width=268)
 +                          Filter Operator [FIL_6] (rows=306 width=268)
- =======
 +                      File Output Operator [FS_3]
 +                        table:{"name:":"default.orc_merge5"}
 +                        Select Operator [SEL_2] (rows=306 width=268)
 +                          Output:["_col0","_col1","_col2","_col3","_col4"]
 +                          Filter Operator [FIL_4] (rows=306 width=268)
- >>>>>>> master
                              predicate:(userid <= 13)
                              TableScan [TS_0] (rows=919 width=268)
                                default@orc_merge5,orc_merge5,Tbl:COMPLETE,Col:NONE,Output:["userid","string1","subtype","decimal1","ts"]


[31/50] [abbrv] hive git commit: HIVE-16416 : Service: move constants out from HiveAuthFactory (Zoltan Haindrich, reviewed by Peter Vary, Thejas Nair)

Posted by we...@apache.org.
HIVE-16416 : Service: move constants out from HiveAuthFactory (Zoltan Haindrich, reviewed by Peter Vary, Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/72604208
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/72604208
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/72604208

Branch: refs/heads/hive-14535
Commit: 72604208e9460483c1a05033cb37155aa8c2304d
Parents: c90aa83
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Sun May 14 23:52:32 2017 -0700
Committer: Thejas M Nair <th...@hortonworks.com>
Committed: Sun May 14 23:52:32 2017 -0700

----------------------------------------------------------------------
 .../org/apache/hive/beeline/ProxyAuthTest.java  |  4 +-
 .../hive/minikdc/TestHiveAuthFactory.java       |  5 +-
 .../hive/minikdc/TestJdbcWithMiniKdc.java       |  4 +-
 .../thrift/TestThriftCLIServiceWithBinary.java  |  5 +-
 .../thrift/TestThriftCLIServiceWithHttp.java    |  5 +-
 .../TestThriftHttpCLIServiceFeatures.java       |  5 +-
 .../thrift/ThriftCliServiceMessageSizeTest.java |  5 +-
 .../org/apache/hive/jdbc/HiveConnection.java    | 10 ++--
 .../hive/service/auth/HiveAuthConstants.java    | 43 ++++++++++++++++
 .../hive/service/auth/HiveAuthFactory.java      | 52 ++++++--------------
 .../service/cli/thrift/ThriftCLIService.java    |  7 +--
 .../service/cli/thrift/ThriftHttpServlet.java   |  7 +--
 .../cli/TestRetryingThriftCLIServiceClient.java |  4 +-
 .../service/cli/session/TestSessionHooks.java   |  4 +-
 .../thrift/ThriftCliServiceTestWithCookie.java  |  5 +-
 15 files changed, 96 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
----------------------------------------------------------------------
diff --git a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
index 0a08389..318857d 100644
--- a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
+++ b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
@@ -29,10 +29,10 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.beeline.BeeLine;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
-import org.apache.hive.service.auth.HiveAuthFactory;
 
 /**
  * Simple client application to test various direct and proxy connection to HiveServer2
@@ -203,7 +203,7 @@ public class ProxyAuthTest {
 
   private static void storeTokenInJobConf(String tokenStr) throws Exception {
     Utils.setTokenStr(Utils.getUGI(),
-          tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
+          tokenStr, HiveAuthConstants.HS2_CLIENT_TOKEN);
     System.out.println("Stored token " + tokenStr);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
index 0405ff3..e3a5190 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
@@ -20,6 +20,7 @@ package org.apache.hive.minikdc;
 import org.junit.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -46,7 +47,7 @@ public class TestHiveAuthFactory {
    */
   @Test
   public void testStartTokenManagerForMemoryTokenStore() throws Exception {
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.KERBEROS.getAuthName());
     String principalName = miniHiveKdc.getFullHiveServicePrincipal();
     System.out.println("Principal: " + principalName);
 
@@ -68,7 +69,7 @@ public class TestHiveAuthFactory {
    */
   @Test
   public void testStartTokenManagerForDBTokenStore() throws Exception {
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.KERBEROS.getAuthName());
     String principalName = miniHiveKdc.getFullHiveServicePrincipal();
     System.out.println("Principal: " + principalName);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
index daf0f7e..256262d 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
-import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.session.HiveSessionHook;
 import org.apache.hive.service.cli.session.HiveSessionHookContext;
@@ -255,7 +255,7 @@ public class TestJdbcWithMiniKdc {
   protected void storeToken(String tokenStr, UserGroupInformation ugi)
       throws Exception {
     Utils.setTokenStr(ugi,
-        tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
+        tokenStr, HiveAuthConstants.HS2_CLIENT_TOKEN);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithBinary.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithBinary.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithBinary.java
index ceaf0e92..842c30f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithBinary.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithBinary.java
@@ -21,7 +21,8 @@ package org.apache.hive.service.cli.thrift;
 import static org.junit.Assert.assertNotNull;
 
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -54,7 +55,7 @@ public class TestThriftCLIServiceWithBinary extends ThriftCLIServiceTest {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, port);
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, AuthTypes.NONE.toString());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NONE.toString());
     hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode);
 
     startHiveServer2WithConf(hiveConf);

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithHttp.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithHttp.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithHttp.java
index b1c6b67..3564272 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithHttp.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCLIServiceWithHttp.java
@@ -29,8 +29,9 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.jdbc.HttpBasicAuthInterceptor;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
 import org.apache.hive.service.auth.HiveAuthFactory;
-import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
 import org.apache.hive.service.rpc.thrift.TCLIService;
 import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
 import org.apache.http.Header;
@@ -79,7 +80,7 @@ public class TestThriftCLIServiceWithHttp extends ThriftCLIServiceTest {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, port);
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, AuthTypes.NOSASL.toString());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
     hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, thriftHttpPath);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
index 1581f9b..1911d2c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
@@ -44,7 +44,8 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hive.jdbc.HttpBasicAuthInterceptor;
-import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
 import org.apache.hive.service.rpc.thrift.TCLIService;
 import org.apache.hive.service.rpc.thrift.TExecuteStatementReq;
 import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
@@ -133,7 +134,7 @@ public class TestThriftHttpCLIServiceFeatures  {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, ThriftCLIServiceTest.host);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, ThriftCLIServiceTest.port);
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, AuthTypes.NOSASL.toString());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
     hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, thriftHttpPath);
     hiveConf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java
index 7148307..c859b8f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/ThriftCliServiceMessageSizeTest.java
@@ -30,7 +30,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hive.service.Service;
-import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.server.HiveServer2;
 import org.junit.After;
@@ -110,7 +111,7 @@ public class ThriftCliServiceMessageSizeTest {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, port);
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, AuthTypes.NONE.toString());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NONE.toString());
     hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, transportMode);
 
     HiveServer2 hiveServer2 = new HiveServer2();

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index fb18adb..8b37ab0 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -21,7 +21,7 @@ package org.apache.hive.jdbc;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
-import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
 import org.apache.hive.service.auth.SaslQOP;
@@ -636,7 +636,7 @@ public class HiveConnection implements java.sql.Connection {
     if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
       // check delegation token in job conf if any
       try {
-        tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
+        tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthConstants.HS2_CLIENT_TOKEN);
       } catch (IOException e) {
         throw new SQLException("Error reading token ", e);
       }
@@ -664,9 +664,9 @@ public class HiveConnection implements java.sql.Connection {
 
     // set the session configuration
     Map<String, String> sessVars = connParams.getSessionVars();
-    if (sessVars.containsKey(HiveAuthFactory.HS2_PROXY_USER)) {
-      openConf.put(HiveAuthFactory.HS2_PROXY_USER,
-          sessVars.get(HiveAuthFactory.HS2_PROXY_USER));
+    if (sessVars.containsKey(HiveAuthConstants.HS2_PROXY_USER)) {
+      openConf.put(HiveAuthConstants.HS2_PROXY_USER,
+          sessVars.get(HiveAuthConstants.HS2_PROXY_USER));
     }
     openReq.setConfiguration(openConf);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java b/service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java
new file mode 100644
index 0000000..a49f2f1
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthConstants.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+public class HiveAuthConstants {
+  public enum AuthTypes {
+    NOSASL("NOSASL"),
+    NONE("NONE"),
+    LDAP("LDAP"),
+    KERBEROS("KERBEROS"),
+    CUSTOM("CUSTOM"),
+    PAM("PAM");
+
+    private final String authType;
+
+    AuthTypes(String authType) {
+      this.authType = authType;
+    }
+
+    public String getAuthName() {
+      return authType;
+    }
+  }
+
+  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
+  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
+}
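
A side note on the call sites updated below: each AuthTypes constant stores a string equal to its own declared name, so toString() (the enum name) and getAuthName() return the same value. That is why tests that use AuthTypes.NONE.toString() and factory code that uses getAuthName() remain interchangeable after the move. A tiny check (not part of the patch):

package org.apache.hive.service.auth;

// Verifies that toString() and getAuthName() agree for every constant.
public class AuthTypesNameCheck {
  public static void main(String[] args) {
    for (HiveAuthConstants.AuthTypes t : HiveAuthConstants.AuthTypes.values()) {
      assert t.toString().equals(t.getAuthName());
      System.out.println(t + " == " + t.getAuthName());
    }
  }
}
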

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index dcb6338..541fe5e 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -56,27 +56,6 @@ import org.slf4j.LoggerFactory;
 public class HiveAuthFactory {
   private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
 
-
-  public enum AuthTypes {
-    NOSASL("NOSASL"),
-    NONE("NONE"),
-    LDAP("LDAP"),
-    KERBEROS("KERBEROS"),
-    CUSTOM("CUSTOM"),
-    PAM("PAM");
-
-    private final String authType;
-
-    AuthTypes(String authType) {
-      this.authType = authType;
-    }
-
-    public String getAuthName() {
-      return authType;
-    }
-
-  }
-
   private HadoopThriftAuthBridge.Server saslServer;
   private String authTypeStr;
   private final String transportMode;
@@ -84,9 +63,6 @@ public class HiveAuthFactory {
   private String hadoopAuth;
   private HiveDelegationTokenManager delegationTokenManager = null;
 
-  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
-  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
-
   public HiveAuthFactory(HiveConf conf) throws TTransportException {
     this.conf = conf;
     transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
@@ -99,9 +75,9 @@ public class HiveAuthFactory {
     // In http mode we use NOSASL as the default auth type
     if (authTypeStr == null) {
       if ("http".equalsIgnoreCase(transportMode)) {
-        authTypeStr = AuthTypes.NOSASL.getAuthName();
+        authTypeStr = HiveAuthConstants.AuthTypes.NOSASL.getAuthName();
       } else {
-        authTypeStr = AuthTypes.NONE.getAuthName();
+        authTypeStr = HiveAuthConstants.AuthTypes.NONE.getAuthName();
       }
     }
     if (isSASLWithKerberizedHadoop()) {
@@ -156,12 +132,12 @@ public class HiveAuthFactory {
       } catch (TTransportException e) {
         throw new LoginException(e.getMessage());
       }
-      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+      if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.KERBEROS.getAuthName())) {
         // no-op
-      } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
+      } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) {
         try {
           serverTransportFactory.addServerDefinition("PLAIN",
               authTypeStr, null, new HashMap<String, String>(),
@@ -173,12 +149,12 @@ public class HiveAuthFactory {
         throw new LoginException("Unsupported authentication type " + authTypeStr);
       }
       transportFactory = saslServer.wrapTransportFactory(serverTransportFactory);
-    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName()) ||
-          authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
+    } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) ||
+          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) {
        transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
-    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
+    } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName())) {
       transportFactory = new TTransportFactory();
     } else {
       throw new LoginException("Unsupported authentication type " + authTypeStr);
@@ -218,7 +194,7 @@ public class HiveAuthFactory {
 
   public boolean isSASLWithKerberizedHadoop() {
     return "kerberos".equalsIgnoreCase(hadoopAuth)
-        && !authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName());
+        && !authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName());
   }
 
   public boolean isSASLKerberosUser() {
@@ -259,7 +235,7 @@ public class HiveAuthFactory {
 
     try {
       String tokenStr = delegationTokenManager.getDelegationTokenWithService(owner, renewer,
-          HS2_CLIENT_TOKEN, remoteAddr);
+          HiveAuthConstants.HS2_CLIENT_TOKEN, remoteAddr);
       if (tokenStr == null || tokenStr.isEmpty()) {
         throw new HiveSQLException(
             "Received empty retrieving delegation token for user " + owner, "08S01");

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 0fdc8d9..2b252d2 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.service.AbstractService;
 import org.apache.hive.service.ServiceException;
 import org.apache.hive.service.ServiceUtils;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.TSetIpAddressProcessor;
 import org.apache.hive.service.cli.CLIService;
@@ -820,8 +821,8 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
       LOG.debug("Proxy user from query string: " + proxyUser);
     }
 
-    if (proxyUser == null && sessionConf != null && sessionConf.containsKey(HiveAuthFactory.HS2_PROXY_USER)) {
-      String proxyUserFromThriftBody = sessionConf.get(HiveAuthFactory.HS2_PROXY_USER);
+    if (proxyUser == null && sessionConf != null && sessionConf.containsKey(HiveAuthConstants.HS2_PROXY_USER)) {
+      String proxyUserFromThriftBody = sessionConf.get(HiveAuthConstants.HS2_PROXY_USER);
       LOG.debug("Proxy user from thrift body: " + proxyUserFromThriftBody);
       proxyUser = proxyUserFromThriftBody;
     }
@@ -836,7 +837,7 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
     }
 
     // If there's no authentication, then directly substitute the user
-    if (HiveAuthFactory.AuthTypes.NONE.toString().
+    if (HiveAuthConstants.AuthTypes.NONE.toString().
         equalsIgnoreCase(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) {
       return proxyUser;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index fbe6da4..cda736c 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthentica
 import org.apache.hive.service.CookieSigner;
 import org.apache.hive.service.auth.AuthenticationProviderFactory;
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.HttpAuthUtils;
 import org.apache.hive.service.auth.HttpAuthenticationException;
@@ -192,7 +193,7 @@ public class ThriftHttpServlet extends TServlet {
 
       // Generate new cookie and add it to the response
       if (requireNewCookie &&
-          !authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
+          !authType.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.toString())) {
         String cookieToken = HttpAuthUtils.createCookieToken(clientUserName);
         Cookie hs2Cookie = createCookie(signer.signCookie(cookieToken));
 
@@ -354,7 +355,7 @@ public class ThriftHttpServlet extends TServlet {
       throws HttpAuthenticationException {
     String userName = getUsername(request, authType);
     // No-op when authType is NOSASL
-    if (!authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
+    if (!authType.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.toString())) {
       try {
         AuthMethods authMethod = AuthMethods.getValidAuthMethod(authType);
         PasswdAuthenticationProvider provider =
@@ -567,7 +568,7 @@ public class ThriftHttpServlet extends TServlet {
   }
 
   private boolean isKerberosAuthMode(String authType) {
-    return authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.KERBEROS.toString());
+    return authType.equalsIgnoreCase(HiveAuthConstants.AuthTypes.KERBEROS.toString());
   }
 
   private static String getDoAsQueryParam(String queryString) {

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
----------------------------------------------------------------------
diff --git a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
index 79953c4..2c4a6fe 100644
--- a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
+++ b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
@@ -20,7 +20,7 @@ package org.apache.hive.service.cli;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.Service;
-import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.hive.service.cli.thrift.RetryingThriftCLIServiceClient;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
@@ -54,7 +54,7 @@ public class TestRetryingThriftCLIServiceClient {
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, "localhost");
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT, 15000);
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
-    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.NONE.toString());
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NONE.toString());
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "binary");
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT, 3);
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT, 3);

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
----------------------------------------------------------------------
diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
index 2878825..ff1d837 100644
--- a/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
+++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
@@ -28,7 +28,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.HiveAuthConstants;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
@@ -90,7 +90,7 @@ public class TestSessionHooks extends TestCase {
     String connectingUser = "user1";
     String proxyUser = System.getProperty("user.name");
     Map<String, String>sessConf = new HashMap<String,String>();
-    sessConf.put(HiveAuthFactory.HS2_PROXY_USER, proxyUser);
+    sessConf.put(HiveAuthConstants.HS2_PROXY_USER, proxyUser);
     sessionUserName = proxyUser;
     SessionHandle sessionHandle = client.openSession(connectingUser, "foobar", sessConf);
     Assert.assertEquals(1, SessionHookTest.runCount.get());

http://git-wip-us.apache.org/repos/asf/hive/blob/72604208/service/src/test/org/apache/hive/service/cli/thrift/ThriftCliServiceTestWithCookie.java
----------------------------------------------------------------------
diff --git a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCliServiceTestWithCookie.java b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCliServiceTestWithCookie.java
index 6fec947..13f582d 100644
--- a/service/src/test/org/apache/hive/service/cli/thrift/ThriftCliServiceTestWithCookie.java
+++ b/service/src/test/org/apache/hive/service/cli/thrift/ThriftCliServiceTestWithCookie.java
@@ -31,7 +31,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hive.service.Service;
-import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes;
 import org.apache.hive.service.cli.OperationHandle;
 import org.apache.hive.service.cli.OperationState;
 import org.apache.hive.service.cli.OperationStatus;
@@ -79,7 +80,7 @@ public class ThriftCliServiceTestWithCookie {
     hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host);
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, port);
-    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, AuthTypes.NOSASL.toString());
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
 
     startHiveServer2WithConf(hiveConf);
 


[10/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
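The golden-file diffs below show the visible effect of this change: repeated TableScan operators over the same table (item, date_dim, catalog_sales, ...) are collapsed into a single shared scan, the Tez vertex lists shrink and get renumbered, and later occurrences are printed as "Please refer to the previous TableScan [TS_x]". As a minimal sketch only (not code from this commit), and assuming the optimizer is exposed through a HiveConf property named hive.optimize.shared.work (an assumption to verify against HiveConf), a caller could toggle it like this before compiling a query:

    // Illustrative sketch: enable/inspect the assumed shared-work flag.
    // HiveConf extends Hadoop's Configuration, so setBoolean/getBoolean exist;
    // the property name "hive.optimize.shared.work" is an assumption here.
    import org.apache.hadoop.hive.conf.HiveConf;

    public class SharedScanConfExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Assumed key controlling scan sharing in Tez plans.
        conf.setBoolean("hive.optimize.shared.work", true);
        System.out.println("shared work enabled = "
            + conf.getBoolean("hive.optimize.shared.work", false));
      }
    }

With the flag on, plans such as query75.q.out need only one scan per base table, which is exactly what the "-"/"+" hunks that follow record.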
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query75.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query75.q.out b/ql/src/test/results/clientpositive/perf/query75.q.out
index b1e236d..0ecc985 100644
--- a/ql/src/test/results/clientpositive/perf/query75.q.out
+++ b/ql/src/test/results/clientpositive/perf/query75.q.out
@@ -5,28 +5,28 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 13 <- Map 12 (SIMPLE_EDGE), Map 16 (SIMPLE_EDGE)
-Reducer 14 <- Map 17 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Map 18 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-Reducer 20 <- Map 19 (SIMPLE_EDGE), Map 23 (SIMPLE_EDGE)
-Reducer 21 <- Map 24 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
-Reducer 22 <- Map 25 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Map 32 (SIMPLE_EDGE)
-Reducer 28 <- Map 33 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
-Reducer 29 <- Map 34 (SIMPLE_EDGE), Reducer 28 (SIMPLE_EDGE), Union 30 (CONTAINS)
-Reducer 3 <- Map 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 31 <- Union 30 (SIMPLE_EDGE)
-Reducer 36 <- Map 35 (SIMPLE_EDGE), Map 39 (SIMPLE_EDGE)
-Reducer 37 <- Map 40 (SIMPLE_EDGE), Reducer 36 (SIMPLE_EDGE)
-Reducer 38 <- Map 41 (SIMPLE_EDGE), Reducer 37 (SIMPLE_EDGE), Union 30 (CONTAINS)
-Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 43 <- Map 42 (SIMPLE_EDGE), Map 46 (SIMPLE_EDGE)
-Reducer 44 <- Map 47 (SIMPLE_EDGE), Reducer 43 (SIMPLE_EDGE)
-Reducer 45 <- Map 48 (SIMPLE_EDGE), Reducer 44 (SIMPLE_EDGE), Union 30 (CONTAINS)
+Reducer 10 <- Map 27 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Map 28 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 12 (CONTAINS)
+Reducer 13 <- Union 12 (SIMPLE_EDGE)
+Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 29 (SIMPLE_EDGE)
+Reducer 16 <- Map 27 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 17 <- Map 30 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 18 <- Map 14 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
+Reducer 19 <- Map 27 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
+Reducer 20 <- Map 32 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 21 <- Map 14 (SIMPLE_EDGE), Map 29 (SIMPLE_EDGE)
+Reducer 22 <- Map 27 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 23 <- Map 30 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE), Union 12 (CONTAINS)
+Reducer 24 <- Map 14 (SIMPLE_EDGE), Map 31 (SIMPLE_EDGE)
+Reducer 25 <- Map 27 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 26 <- Map 32 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE), Union 12 (CONTAINS)
+Reducer 3 <- Map 27 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 28 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE)
-Reducer 7 <- Reducer 31 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 13 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
 Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
+Reducer 9 <- Map 1 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -46,13 +46,13 @@ Stage-0
                   predicate:((CAST( _col10 AS decimal(17,2)) / CAST( _col4 AS decimal(17,2))) < 0.9)
                   Merge Join Operator [MERGEJOIN_259] (rows=737897778 width=108)
                     Conds:RS_148._col0, _col1, _col2, _col3=RS_149._col0, _col1, _col2, _col3(Inner),Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col10","_col11"]
-                  <-Reducer 31 [SIMPLE_EDGE]
+                  <-Reducer 13 [SIMPLE_EDGE]
                     SHUFFLE [RS_149]
                       PartitionCols:_col0, _col1, _col2, _col3
                       Group By Operator [GBY_146] (rows=670816148 width=108)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3
-                      <-Union 30 [SIMPLE_EDGE]
-                        <-Reducer 29 [CONTAINS]
+                      <-Union 12 [SIMPLE_EDGE]
+                        <-Reducer 11 [CONTAINS]
                           Reduce Output Operator [RS_145]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_144] (rows=1341632296 width=108)
@@ -61,53 +61,53 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_252] (rows=383314495 width=135)
                                   Conds:RS_92._col1, _col2=RS_93._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 34 [SIMPLE_EDGE]
+                                <-Map 28 [SIMPLE_EDGE]
                                   SHUFFLE [RS_93]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_85] (rows=28798881 width=106)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_232] (rows=28798881 width=106)
                                         predicate:cr_item_sk is not null
-                                        TableScan [TS_83] (rows=28798881 width=106)
+                                        TableScan [TS_9] (rows=28798881 width=106)
                                           default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_item_sk","cr_order_number","cr_return_quantity","cr_return_amount"]
-                                <-Reducer 28 [SIMPLE_EDGE]
+                                <-Reducer 10 [SIMPLE_EDGE]
                                   SHUFFLE [RS_92]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_251] (rows=348467716 width=135)
                                       Conds:RS_89._col1=RS_90._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 33 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_90]
                                         PartitionCols:_col0
                                         Select Operator [SEL_82] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_231] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_80] (rows=462000 width=1436)
+                                            TableScan [TS_6] (rows=462000 width=1436)
                                               default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
-                                    <-Reducer 27 [SIMPLE_EDGE]
+                                    <-Reducer 9 [SIMPLE_EDGE]
                                       SHUFFLE [RS_89]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_250] (rows=316788826 width=135)
                                           Conds:RS_86._col0=RS_87._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                                        <-Map 26 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_86]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_76] (rows=287989836 width=135)
-                                              Output:["_col0","_col1","_col2","_col3","_col4"]
-                                              Filter Operator [FIL_229] (rows=287989836 width=135)
-                                                predicate:(cs_item_sk is not null and cs_sold_date_sk is not null)
-                                                TableScan [TS_74] (rows=287989836 width=135)
-                                                  default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_order_number","cs_quantity","cs_ext_sales_price"]
-                                        <-Map 32 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_87]
                                             PartitionCols:_col0
                                             Select Operator [SEL_79] (rows=36524 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_230] (rows=36524 width=1119)
                                                 predicate:((d_year = 2002) and d_date_sk is not null)
-                                                TableScan [TS_77] (rows=73049 width=1119)
+                                                TableScan [TS_3] (rows=73049 width=1119)
                                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                        <-Reducer 38 [CONTAINS]
+                                        <-Map 1 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_86]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_76] (rows=287989836 width=135)
+                                              Output:["_col0","_col1","_col2","_col3","_col4"]
+                                              Filter Operator [FIL_229] (rows=287989836 width=135)
+                                                predicate:(cs_item_sk is not null and cs_sold_date_sk is not null)
+                                                TableScan [TS_0] (rows=287989836 width=135)
+                                                  default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_order_number","cs_quantity","cs_ext_sales_price"]
+                        <-Reducer 23 [CONTAINS]
                           Reduce Output Operator [RS_145]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_144] (rows=1341632296 width=108)
@@ -116,53 +116,51 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_255] (rows=766650239 width=88)
                                   Conds:RS_114._col1, _col2=RS_115._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 41 [SIMPLE_EDGE]
+                                <-Map 30 [SIMPLE_EDGE]
                                   SHUFFLE [RS_115]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_107] (rows=57591150 width=77)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_236] (rows=57591150 width=77)
                                         predicate:sr_item_sk is not null
-                                        TableScan [TS_105] (rows=57591150 width=77)
+                                        TableScan [TS_31] (rows=57591150 width=77)
                                           default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_item_sk","sr_ticket_number","sr_return_quantity","sr_return_amt"]
-                                <-Reducer 37 [SIMPLE_EDGE]
+                                <-Reducer 22 [SIMPLE_EDGE]
                                   SHUFFLE [RS_114]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_254] (rows=696954748 width=88)
                                       Conds:RS_111._col1=RS_112._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 40 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_112]
                                         PartitionCols:_col0
                                         Select Operator [SEL_104] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_235] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_102] (rows=462000 width=1436)
-                                              default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
-                                    <-Reducer 36 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_6]
+                                    <-Reducer 21 [SIMPLE_EDGE]
                                       SHUFFLE [RS_111]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_253] (rows=633595212 width=88)
                                           Conds:RS_108._col0=RS_109._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                                        <-Map 35 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_109]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_101] (rows=36524 width=1119)
+                                              Output:["_col0"]
+                                              Filter Operator [FIL_234] (rows=36524 width=1119)
+                                                predicate:((d_year = 2002) and d_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 29 [SIMPLE_EDGE]
                                           SHUFFLE [RS_108]
                                             PartitionCols:_col0
                                             Select Operator [SEL_98] (rows=575995635 width=88)
                                               Output:["_col0","_col1","_col2","_col3","_col4"]
                                               Filter Operator [FIL_233] (rows=575995635 width=88)
                                                 predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
-                                                TableScan [TS_96] (rows=575995635 width=88)
+                                                TableScan [TS_22] (rows=575995635 width=88)
                                                   default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_ticket_number","ss_quantity","ss_ext_sales_price"]
-                                        <-Map 39 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_109]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_101] (rows=36524 width=1119)
-                                              Output:["_col0"]
-                                              Filter Operator [FIL_234] (rows=36524 width=1119)
-                                                predicate:((d_year = 2002) and d_date_sk is not null)
-                                                TableScan [TS_99] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                        <-Reducer 45 [CONTAINS]
+                        <-Reducer 26 [CONTAINS]
                           Reduce Output Operator [RS_145]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_144] (rows=1341632296 width=108)
@@ -171,59 +169,57 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_258] (rows=191667562 width=135)
                                   Conds:RS_138._col1, _col2=RS_139._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 48 [SIMPLE_EDGE]
+                                <-Map 32 [SIMPLE_EDGE]
                                   SHUFFLE [RS_139]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_131] (rows=14398467 width=92)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_240] (rows=14398467 width=92)
                                         predicate:wr_item_sk is not null
-                                        TableScan [TS_129] (rows=14398467 width=92)
+                                        TableScan [TS_55] (rows=14398467 width=92)
                                           default@web_returns,web_returns,Tbl:COMPLETE,Col:NONE,Output:["wr_item_sk","wr_order_number","wr_return_quantity","wr_return_amt"]
-                                <-Reducer 44 [SIMPLE_EDGE]
+                                <-Reducer 25 [SIMPLE_EDGE]
                                   SHUFFLE [RS_138]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_257] (rows=174243235 width=135)
                                       Conds:RS_135._col1=RS_136._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 47 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_136]
                                         PartitionCols:_col0
                                         Select Operator [SEL_128] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_239] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_126] (rows=462000 width=1436)
-                                              default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
-                                    <-Reducer 43 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_6]
+                                    <-Reducer 24 [SIMPLE_EDGE]
                                       SHUFFLE [RS_135]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_256] (rows=158402938 width=135)
                                           Conds:RS_132._col0=RS_133._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                                        <-Map 42 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_133]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_125] (rows=36524 width=1119)
+                                              Output:["_col0"]
+                                              Filter Operator [FIL_238] (rows=36524 width=1119)
+                                                predicate:((d_year = 2002) and d_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 31 [SIMPLE_EDGE]
                                           SHUFFLE [RS_132]
                                             PartitionCols:_col0
                                             Select Operator [SEL_122] (rows=144002668 width=135)
                                               Output:["_col0","_col1","_col2","_col3","_col4"]
                                               Filter Operator [FIL_237] (rows=144002668 width=135)
                                                 predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
-                                                TableScan [TS_120] (rows=144002668 width=135)
+                                                TableScan [TS_46] (rows=144002668 width=135)
                                                   default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_order_number","ws_quantity","ws_ext_sales_price"]
-                                        <-Map 46 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_133]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_125] (rows=36524 width=1119)
-                                              Output:["_col0"]
-                                              Filter Operator [FIL_238] (rows=36524 width=1119)
-                                                predicate:((d_year = 2002) and d_date_sk is not null)
-                                                TableScan [TS_123] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
                   <-Reducer 6 [SIMPLE_EDGE]
                     SHUFFLE [RS_148]
                       PartitionCols:_col0, _col1, _col2, _col3
                       Group By Operator [GBY_72] (rows=670816148 width=108)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3
                       <-Union 5 [SIMPLE_EDGE]
-                        <-Reducer 15 [CONTAINS]
+                        <-Reducer 17 [CONTAINS]
                           Reduce Output Operator [RS_71]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_70] (rows=1341632296 width=108)
@@ -232,53 +228,49 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_246] (rows=766650239 width=88)
                                   Conds:RS_40._col1, _col2=RS_41._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 18 [SIMPLE_EDGE]
+                                <-Map 30 [SIMPLE_EDGE]
                                   SHUFFLE [RS_41]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_33] (rows=57591150 width=77)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_224] (rows=57591150 width=77)
                                         predicate:sr_item_sk is not null
-                                        TableScan [TS_31] (rows=57591150 width=77)
-                                          default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_item_sk","sr_ticket_number","sr_return_quantity","sr_return_amt"]
-                                <-Reducer 14 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_31]
+                                <-Reducer 16 [SIMPLE_EDGE]
                                   SHUFFLE [RS_40]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_245] (rows=696954748 width=88)
                                       Conds:RS_37._col1=RS_38._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 17 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_38]
                                         PartitionCols:_col0
                                         Select Operator [SEL_30] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_223] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_28] (rows=462000 width=1436)
-                                              default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
-                                    <-Reducer 13 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_6]
+                                    <-Reducer 15 [SIMPLE_EDGE]
                                       SHUFFLE [RS_37]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_244] (rows=633595212 width=88)
                                           Conds:RS_34._col0=RS_35._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                                        <-Map 12 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_34]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_24] (rows=575995635 width=88)
-                                              Output:["_col0","_col1","_col2","_col3","_col4"]
-                                              Filter Operator [FIL_221] (rows=575995635 width=88)
-                                                predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
-                                                TableScan [TS_22] (rows=575995635 width=88)
-                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_ticket_number","ss_quantity","ss_ext_sales_price"]
-                                        <-Map 16 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_35]
                                             PartitionCols:_col0
                                             Select Operator [SEL_27] (rows=36524 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_222] (rows=36524 width=1119)
                                                 predicate:((d_year = 2001) and d_date_sk is not null)
-                                                TableScan [TS_25] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
-                        <-Reducer 22 [CONTAINS]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 29 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_34]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_24] (rows=575995635 width=88)
+                                              Output:["_col0","_col1","_col2","_col3","_col4"]
+                                              Filter Operator [FIL_221] (rows=575995635 width=88)
+                                                predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_22]
+                        <-Reducer 20 [CONTAINS]
                           Reduce Output Operator [RS_71]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_70] (rows=1341632296 width=108)
@@ -287,52 +279,48 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_249] (rows=191667562 width=135)
                                   Conds:RS_64._col1, _col2=RS_65._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 25 [SIMPLE_EDGE]
+                                <-Map 32 [SIMPLE_EDGE]
                                   SHUFFLE [RS_65]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_57] (rows=14398467 width=92)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_228] (rows=14398467 width=92)
                                         predicate:wr_item_sk is not null
-                                        TableScan [TS_55] (rows=14398467 width=92)
-                                          default@web_returns,web_returns,Tbl:COMPLETE,Col:NONE,Output:["wr_item_sk","wr_order_number","wr_return_quantity","wr_return_amt"]
-                                <-Reducer 21 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_55]
+                                <-Reducer 19 [SIMPLE_EDGE]
                                   SHUFFLE [RS_64]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_248] (rows=174243235 width=135)
                                       Conds:RS_61._col1=RS_62._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 24 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_62]
                                         PartitionCols:_col0
                                         Select Operator [SEL_54] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_227] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_52] (rows=462000 width=1436)
-                                              default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
-                                    <-Reducer 20 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_6]
+                                    <-Reducer 18 [SIMPLE_EDGE]
                                       SHUFFLE [RS_61]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_247] (rows=158402938 width=135)
                                           Conds:RS_58._col0=RS_59._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
-                                        <-Map 19 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_58]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_48] (rows=144002668 width=135)
-                                              Output:["_col0","_col1","_col2","_col3","_col4"]
-                                              Filter Operator [FIL_225] (rows=144002668 width=135)
-                                                predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
-                                                TableScan [TS_46] (rows=144002668 width=135)
-                                                  default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_order_number","ws_quantity","ws_ext_sales_price"]
-                                        <-Map 23 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_59]
                                             PartitionCols:_col0
                                             Select Operator [SEL_51] (rows=36524 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_226] (rows=36524 width=1119)
                                                 predicate:((d_year = 2001) and d_date_sk is not null)
-                                                TableScan [TS_49] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 31 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_58]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_48] (rows=144002668 width=135)
+                                              Output:["_col0","_col1","_col2","_col3","_col4"]
+                                              Filter Operator [FIL_225] (rows=144002668 width=135)
+                                                predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_46]
                         <-Reducer 4 [CONTAINS]
                           Reduce Output Operator [RS_71]
                             PartitionCols:_col0, _col1, _col2, _col3
@@ -342,34 +330,40 @@ Stage-0
                                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                                 Merge Join Operator [MERGEJOIN_243] (rows=383314495 width=135)
                                   Conds:RS_18._col1, _col2=RS_19._col0, _col1(Left Outer),Output:["_col3","_col4","_col8","_col9","_col10","_col12","_col15","_col16"]
-                                <-Map 11 [SIMPLE_EDGE]
+                                <-Map 28 [SIMPLE_EDGE]
                                   SHUFFLE [RS_19]
                                     PartitionCols:_col0, _col1
                                     Select Operator [SEL_11] (rows=28798881 width=106)
                                       Output:["_col0","_col1","_col2","_col3"]
                                       Filter Operator [FIL_220] (rows=28798881 width=106)
                                         predicate:cr_item_sk is not null
-                                        TableScan [TS_9] (rows=28798881 width=106)
-                                          default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_item_sk","cr_order_number","cr_return_quantity","cr_return_amount"]
+                                         Please refer to the previous TableScan [TS_9]
                                 <-Reducer 3 [SIMPLE_EDGE]
                                   SHUFFLE [RS_18]
                                     PartitionCols:_col1, _col2
                                     Merge Join Operator [MERGEJOIN_242] (rows=348467716 width=135)
                                       Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col1","_col2","_col3","_col4","_col8","_col9","_col10","_col12"]
-                                    <-Map 10 [SIMPLE_EDGE]
+                                    <-Map 27 [SIMPLE_EDGE]
                                       SHUFFLE [RS_16]
                                         PartitionCols:_col0
                                         Select Operator [SEL_8] (rows=231000 width=1436)
                                           Output:["_col0","_col1","_col2","_col3","_col5"]
                                           Filter Operator [FIL_219] (rows=231000 width=1436)
                                             predicate:((i_category = 'Sports') and i_item_sk is not null and i_brand_id is not null and i_class_id is not null and i_category_id is not null and i_manufact_id is not null)
-                                            TableScan [TS_6] (rows=462000 width=1436)
-                                              default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand_id","i_class_id","i_category_id","i_category","i_manufact_id"]
+                                             Please refer to the previous TableScan [TS_6]
                                     <-Reducer 2 [SIMPLE_EDGE]
                                       SHUFFLE [RS_15]
                                         PartitionCols:_col1
                                         Merge Join Operator [MERGEJOIN_241] (rows=316788826 width=135)
                                           Conds:RS_12._col0=RS_13._col0(Inner),Output:["_col1","_col2","_col3","_col4"]
+                                        <-Map 14 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_13]
+                                            PartitionCols:_col0
+                                            Select Operator [SEL_5] (rows=36524 width=1119)
+                                              Output:["_col0"]
+                                              Filter Operator [FIL_218] (rows=36524 width=1119)
+                                                predicate:((d_year = 2001) and d_date_sk is not null)
+                                                 Please refer to the previous TableScan [TS_3]
                                         <-Map 1 [SIMPLE_EDGE]
                                           SHUFFLE [RS_12]
                                             PartitionCols:_col0
@@ -377,15 +371,5 @@ Stage-0
                                               Output:["_col0","_col1","_col2","_col3","_col4"]
                                               Filter Operator [FIL_217] (rows=287989836 width=135)
                                                 predicate:(cs_item_sk is not null and cs_sold_date_sk is not null)
-                                                TableScan [TS_0] (rows=287989836 width=135)
-                                                  default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_item_sk","cs_order_number","cs_quantity","cs_ext_sales_price"]
-                                        <-Map 9 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_13]
-                                            PartitionCols:_col0
-                                            Select Operator [SEL_5] (rows=36524 width=1119)
-                                              Output:["_col0"]
-                                              Filter Operator [FIL_218] (rows=36524 width=1119)
-                                                predicate:((d_year = 2001) and d_date_sk is not null)
-                                                TableScan [TS_3] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                 Please refer to the previous TableScan [TS_0]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query76.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query76.q.out b/ql/src/test/results/clientpositive/perf/query76.q.out
index c7dbb37..19243db 100644
--- a/ql/src/test/results/clientpositive/perf/query76.q.out
+++ b/ql/src/test/results/clientpositive/perf/query76.q.out
@@ -5,14 +5,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 12 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 13 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 4 (CONTAINS)
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE)
-Reducer 16 <- Map 18 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE), Union 4 (CONTAINS)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 3 <- Map 8 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Reducer 10 <- Map 16 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 11 (SIMPLE_EDGE)
+Reducer 3 <- Map 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
 Reducer 5 <- Union 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
+Reducer 7 <- Map 1 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
+Reducer 8 <- Map 14 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Reducer 9 <- Map 1 (SIMPLE_EDGE), Map 15 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -29,48 +29,7 @@ Stage-0
               Group By Operator [GBY_54] (rows=304916424 width=108)
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["count(VALUE._col0)","sum(VALUE._col1)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4
               <-Union 4 [SIMPLE_EDGE]
-                <-Reducer 11 [CONTAINS]
-                  Reduce Output Operator [RS_53]
-                    PartitionCols:_col0, _col1, _col2, _col3, _col4
-                    Group By Operator [GBY_52] (rows=609832848 width=108)
-                      Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["count()","sum(_col5)"],keys:_col0, _col1, _col2, _col3, _col4
-                      Select Operator [SEL_31] (rows=87121617 width=135)
-                        Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                        Merge Join Operator [MERGEJOIN_87] (rows=87121617 width=135)
-                          Conds:RS_28._col0=RS_29._col0(Inner),Output:["_col3","_col5","_col7","_col8"]
-                        <-Map 13 [SIMPLE_EDGE]
-                          SHUFFLE [RS_29]
-                            PartitionCols:_col0
-                            Select Operator [SEL_24] (rows=73049 width=1119)
-                              Output:["_col0","_col1","_col2"]
-                              Filter Operator [FIL_80] (rows=73049 width=1119)
-                                predicate:d_date_sk is not null
-                                TableScan [TS_22] (rows=73049 width=1119)
-                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                        <-Reducer 10 [SIMPLE_EDGE]
-                          SHUFFLE [RS_28]
-                            PartitionCols:_col0
-                            Merge Join Operator [MERGEJOIN_86] (rows=79201469 width=135)
-                              Conds:RS_25._col1=RS_26._col0(Inner),Output:["_col0","_col3","_col5"]
-                            <-Map 12 [SIMPLE_EDGE]
-                              SHUFFLE [RS_26]
-                                PartitionCols:_col0
-                                Select Operator [SEL_21] (rows=462000 width=1436)
-                                  Output:["_col0","_col1"]
-                                  Filter Operator [FIL_79] (rows=462000 width=1436)
-                                    predicate:i_item_sk is not null
-                                    TableScan [TS_19] (rows=462000 width=1436)
-                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_category"]
-                            <-Map 9 [SIMPLE_EDGE]
-                              SHUFFLE [RS_25]
-                                PartitionCols:_col1
-                                Select Operator [SEL_18] (rows=72001334 width=135)
-                                  Output:["_col0","_col1","_col3"]
-                                  Filter Operator [FIL_78] (rows=72001334 width=135)
-                                    predicate:(ws_web_page_sk is null and ws_item_sk is not null and ws_sold_date_sk is not null)
-                                    TableScan [TS_16] (rows=144002668 width=135)
-                                      default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_web_page_sk","ws_ext_sales_price"]
-                <-Reducer 16 [CONTAINS]
+                <-Reducer 10 [CONTAINS]
                   Reduce Output Operator [RS_53]
                     PartitionCols:_col0, _col1, _col2, _col3, _col4
                     Group By Operator [GBY_52] (rows=609832848 width=108)
@@ -79,7 +38,7 @@ Stage-0
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                         Merge Join Operator [MERGEJOIN_89] (rows=174233858 width=135)
                           Conds:RS_46._col0=RS_47._col0(Inner),Output:["_col3","_col5","_col7","_col8"]
-                        <-Map 18 [SIMPLE_EDGE]
+                        <-Map 16 [SIMPLE_EDGE]
                           SHUFFLE [RS_47]
                             PartitionCols:_col0
                             Select Operator [SEL_42] (rows=73049 width=1119)
@@ -88,12 +47,21 @@ Stage-0
                                 predicate:d_date_sk is not null
                                 TableScan [TS_40] (rows=73049 width=1119)
                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
-                        <-Reducer 15 [SIMPLE_EDGE]
+                        <-Reducer 9 [SIMPLE_EDGE]
                           SHUFFLE [RS_46]
                             PartitionCols:_col0
                             Merge Join Operator [MERGEJOIN_88] (rows=158394413 width=135)
                               Conds:RS_43._col2=RS_44._col0(Inner),Output:["_col0","_col3","_col5"]
-                            <-Map 14 [SIMPLE_EDGE]
+                            <-Map 1 [SIMPLE_EDGE]
+                              SHUFFLE [RS_44]
+                                PartitionCols:_col0
+                                Select Operator [SEL_39] (rows=462000 width=1436)
+                                  Output:["_col0","_col1"]
+                                  Filter Operator [FIL_82] (rows=462000 width=1436)
+                                    predicate:i_item_sk is not null
+                                    TableScan [TS_0] (rows=462000 width=1436)
+                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_category"]
+                            <-Map 15 [SIMPLE_EDGE]
                               SHUFFLE [RS_43]
                                 PartitionCols:_col2
                                 Select Operator [SEL_36] (rows=143994918 width=135)
@@ -102,15 +70,6 @@ Stage-0
                                     predicate:(cs_warehouse_sk is null and cs_item_sk is not null and cs_sold_date_sk is not null)
                                     TableScan [TS_34] (rows=287989836 width=135)
                                       default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_warehouse_sk","cs_item_sk","cs_ext_sales_price"]
-                            <-Map 17 [SIMPLE_EDGE]
-                              SHUFFLE [RS_44]
-                                PartitionCols:_col0
-                                Select Operator [SEL_39] (rows=462000 width=1436)
-                                  Output:["_col0","_col1"]
-                                  Filter Operator [FIL_82] (rows=462000 width=1436)
-                                    predicate:i_item_sk is not null
-                                    TableScan [TS_37] (rows=462000 width=1436)
-                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_category"]
                 <-Reducer 3 [CONTAINS]
                   Reduce Output Operator [RS_53]
                     PartitionCols:_col0, _col1, _col2, _col3, _col4
@@ -120,7 +79,7 @@ Stage-0
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                         Merge Join Operator [MERGEJOIN_85] (rows=348477373 width=88)
                           Conds:RS_12._col2=RS_13._col0(Inner),Output:["_col1","_col5","_col7","_col8"]
-                        <-Map 8 [SIMPLE_EDGE]
+                        <-Map 12 [SIMPLE_EDGE]
                           SHUFFLE [RS_13]
                             PartitionCols:_col0
                             Select Operator [SEL_8] (rows=73049 width=1119)
@@ -141,9 +100,8 @@ Stage-0
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_75] (rows=462000 width=1436)
                                     predicate:i_item_sk is not null
-                                    TableScan [TS_0] (rows=462000 width=1436)
-                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_category"]
-                            <-Map 7 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 11 [SIMPLE_EDGE]
                               SHUFFLE [RS_10]
                                 PartitionCols:_col1
                                 Select Operator [SEL_5] (rows=287997817 width=88)
@@ -152,4 +110,44 @@ Stage-0
                                     predicate:(ss_addr_sk is null and ss_item_sk is not null and ss_sold_date_sk is not null)
                                     TableScan [TS_3] (rows=575995635 width=88)
                                       default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
+                <-Reducer 8 [CONTAINS]
+                  Reduce Output Operator [RS_53]
+                    PartitionCols:_col0, _col1, _col2, _col3, _col4
+                    Group By Operator [GBY_52] (rows=609832848 width=108)
+                      Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["count()","sum(_col5)"],keys:_col0, _col1, _col2, _col3, _col4
+                      Select Operator [SEL_31] (rows=87121617 width=135)
+                        Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
+                        Merge Join Operator [MERGEJOIN_87] (rows=87121617 width=135)
+                          Conds:RS_28._col0=RS_29._col0(Inner),Output:["_col3","_col5","_col7","_col8"]
+                        <-Map 14 [SIMPLE_EDGE]
+                          SHUFFLE [RS_29]
+                            PartitionCols:_col0
+                            Select Operator [SEL_24] (rows=73049 width=1119)
+                              Output:["_col0","_col1","_col2"]
+                              Filter Operator [FIL_80] (rows=73049 width=1119)
+                                predicate:d_date_sk is not null
+                                TableScan [TS_22] (rows=73049 width=1119)
+                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_qoy"]
+                        <-Reducer 7 [SIMPLE_EDGE]
+                          SHUFFLE [RS_28]
+                            PartitionCols:_col0
+                            Merge Join Operator [MERGEJOIN_86] (rows=79201469 width=135)
+                              Conds:RS_25._col1=RS_26._col0(Inner),Output:["_col0","_col3","_col5"]
+                            <-Map 1 [SIMPLE_EDGE]
+                              SHUFFLE [RS_26]
+                                PartitionCols:_col0
+                                Select Operator [SEL_21] (rows=462000 width=1436)
+                                  Output:["_col0","_col1"]
+                                  Filter Operator [FIL_79] (rows=462000 width=1436)
+                                    predicate:i_item_sk is not null
+                                     Please refer to the previous TableScan [TS_0]
+                            <-Map 13 [SIMPLE_EDGE]
+                              SHUFFLE [RS_25]
+                                PartitionCols:_col1
+                                Select Operator [SEL_18] (rows=72001334 width=135)
+                                  Output:["_col0","_col1","_col3"]
+                                  Filter Operator [FIL_78] (rows=72001334 width=135)
+                                    predicate:(ws_web_page_sk is null and ws_item_sk is not null and ws_sold_date_sk is not null)
+                                    TableScan [TS_16] (rows=144002668 width=135)
+                                      default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_web_page_sk","ws_ext_sales_price"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query80.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query80.q.out b/ql/src/test/results/clientpositive/perf/query80.q.out
index be7ecda..3cf41f3 100644
--- a/ql/src/test/results/clientpositive/perf/query80.q.out
+++ b/ql/src/test/results/clientpositive/perf/query80.q.out
@@ -192,23 +192,23 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
-Reducer 17 <- Map 16 (SIMPLE_EDGE), Map 23 (SIMPLE_EDGE)
-Reducer 18 <- Map 24 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Map 25 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 13 <- Map 12 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE)
+Reducer 14 <- Map 23 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
+Reducer 15 <- Map 24 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 29 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 18 <- Map 12 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
+Reducer 19 <- Map 23 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 11 (SIMPLE_EDGE)
-Reducer 20 <- Map 26 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
-Reducer 21 <- Map 27 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
+Reducer 20 <- Map 24 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 21 <- Map 33 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
 Reducer 22 <- Reducer 21 (SIMPLE_EDGE), Union 8 (CONTAINS)
-Reducer 29 <- Map 28 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
+Reducer 27 <- Map 26 (SIMPLE_EDGE), Map 28 (SIMPLE_EDGE)
 Reducer 3 <- Map 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 30 <- Map 36 (SIMPLE_EDGE), Reducer 29 (SIMPLE_EDGE)
-Reducer 31 <- Map 37 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
-Reducer 32 <- Map 38 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
-Reducer 33 <- Map 39 (SIMPLE_EDGE), Reducer 32 (SIMPLE_EDGE)
-Reducer 34 <- Reducer 33 (SIMPLE_EDGE), Union 8 (CONTAINS)
-Reducer 4 <- Map 13 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 14 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-Reducer 6 <- Map 15 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 31 <- Map 30 (SIMPLE_EDGE), Map 32 (SIMPLE_EDGE)
+Reducer 4 <- Map 23 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 5 <- Map 24 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 6 <- Map 25 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE), Union 8 (CONTAINS)
 Reducer 9 <- Union 8 (SIMPLE_EDGE)
 
@@ -229,7 +229,7 @@ Stage-0
                 Group By Operator [GBY_123] (rows=1217531358 width=108)
                   Output:["_col0","_col1","_col3","_col4","_col5"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)"],keys:KEY._col0, KEY._col1, KEY._col2
                 <-Union 8 [SIMPLE_EDGE]
-                  <-Reducer 22 [CONTAINS]
+                  <-Reducer 17 [CONTAINS]
                     Reduce Output Operator [RS_122]
                       PartitionCols:_col0, _col1, _col2
                       Group By Operator [GBY_121] (rows=2435062716 width=108)
@@ -238,7 +238,7 @@ Stage-0
                           Output:["_col0","_col1","_col2","_col3","_col4"]
                           Group By Operator [GBY_76] (rows=231905279 width=135)
                             Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)"],keys:KEY._col0
-                          <-Reducer 21 [SIMPLE_EDGE]
+                          <-Reducer 16 [SIMPLE_EDGE]
                             SHUFFLE [RS_75]
                               PartitionCols:_col0
                               Group By Operator [GBY_74] (rows=463810558 width=135)
@@ -247,7 +247,7 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3"]
                                   Merge Join Operator [MERGEJOIN_213] (rows=463810558 width=135)
                                     Conds:RS_69._col1=RS_70._col0(Inner),Output:["_col5","_col6","_col9","_col10","_col18"]
-                                  <-Map 27 [SIMPLE_EDGE]
+                                  <-Map 29 [SIMPLE_EDGE]
                                     SHUFFLE [RS_70]
                                       PartitionCols:_col0
                                       Select Operator [SEL_56] (rows=46000 width=460)
@@ -256,54 +256,54 @@ Stage-0
                                           predicate:cp_catalog_page_sk is not null
                                           TableScan [TS_54] (rows=46000 width=460)
                                             default@catalog_page,catalog_page,Tbl:COMPLETE,Col:NONE,Output:["cp_catalog_page_sk","cp_catalog_page_id"]
-                                  <-Reducer 20 [SIMPLE_EDGE]
+                                  <-Reducer 15 [SIMPLE_EDGE]
                                     SHUFFLE [RS_69]
                                       PartitionCols:_col1
                                       Merge Join Operator [MERGEJOIN_212] (rows=421645953 width=135)
                                         Conds:RS_66._col3=RS_67._col0(Inner),Output:["_col1","_col5","_col6","_col9","_col10"]
-                                      <-Map 26 [SIMPLE_EDGE]
+                                      <-Map 24 [SIMPLE_EDGE]
                                         SHUFFLE [RS_67]
                                           PartitionCols:_col0
                                           Select Operator [SEL_53] (rows=1150 width=1179)
                                             Output:["_col0"]
                                             Filter Operator [FIL_196] (rows=1150 width=1179)
                                               predicate:((p_channel_tv = 'N') and p_promo_sk is not null)
-                                              TableScan [TS_51] (rows=2300 width=1179)
+                                              TableScan [TS_12] (rows=2300 width=1179)
                                                 default@promotion,promotion,Tbl:COMPLETE,Col:NONE,Output:["p_promo_sk","p_channel_tv"]
-                                      <-Reducer 19 [SIMPLE_EDGE]
+                                      <-Reducer 14 [SIMPLE_EDGE]
                                         SHUFFLE [RS_66]
                                           PartitionCols:_col3
                                           Merge Join Operator [MERGEJOIN_211] (rows=383314495 width=135)
                                             Conds:RS_63._col2=RS_64._col0(Inner),Output:["_col1","_col3","_col5","_col6","_col9","_col10"]
-                                          <-Map 25 [SIMPLE_EDGE]
+                                          <-Map 23 [SIMPLE_EDGE]
                                             SHUFFLE [RS_64]
                                               PartitionCols:_col0
                                               Select Operator [SEL_50] (rows=154000 width=1436)
                                                 Output:["_col0"]
                                                 Filter Operator [FIL_195] (rows=154000 width=1436)
                                                   predicate:((i_current_price > 50) and i_item_sk is not null)
-                                                  TableScan [TS_48] (rows=462000 width=1436)
+                                                  TableScan [TS_9] (rows=462000 width=1436)
                                                     default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_current_price"]
-                                          <-Reducer 18 [SIMPLE_EDGE]
+                                          <-Reducer 13 [SIMPLE_EDGE]
                                             SHUFFLE [RS_63]
                                               PartitionCols:_col2
                                               Merge Join Operator [MERGEJOIN_210] (rows=348467716 width=135)
                                                 Conds:RS_60._col0=RS_61._col0(Inner),Output:["_col1","_col2","_col3","_col5","_col6","_col9","_col10"]
-                                              <-Map 24 [SIMPLE_EDGE]
+                                              <-Map 12 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_61]
                                                   PartitionCols:_col0
                                                   Select Operator [SEL_47] (rows=8116 width=1119)
                                                     Output:["_col0"]
                                                     Filter Operator [FIL_194] (rows=8116 width=1119)
                                                       predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-09-03 00:00:00.0 and d_date_sk is not null)
-                                                      TableScan [TS_45] (rows=73049 width=1119)
+                                                      TableScan [TS_6] (rows=73049 width=1119)
                                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                              <-Reducer 17 [SIMPLE_EDGE]
+                                              <-Reducer 27 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_60]
                                                   PartitionCols:_col0
                                                   Merge Join Operator [MERGEJOIN_209] (rows=316788826 width=135)
                                                     Conds:RS_57._col2, _col4=RS_58._col0, _col1(Left Outer),Output:["_col0","_col1","_col2","_col3","_col5","_col6","_col9","_col10"]
-                                                  <-Map 16 [SIMPLE_EDGE]
+                                                  <-Map 26 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_57]
                                                       PartitionCols:_col2, _col4
                                                       Select Operator [SEL_41] (rows=287989836 width=135)
@@ -312,7 +312,7 @@ Stage-0
                                                           predicate:(cs_sold_date_sk is not null and cs_catalog_page_sk is not null and cs_item_sk is not null and cs_promo_sk is not null)
                                                           TableScan [TS_39] (rows=287989836 width=135)
                                                             default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_catalog_page_sk","cs_item_sk","cs_promo_sk","cs_order_number","cs_ext_sales_price","cs_net_profit"]
-                                                  <-Map 23 [SIMPLE_EDGE]
+                                                  <-Map 28 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_58]
                                                       PartitionCols:_col0, _col1
                                                       Select Operator [SEL_44] (rows=28798881 width=106)
@@ -321,7 +321,7 @@ Stage-0
                                                           predicate:cr_item_sk is not null
                                                           TableScan [TS_42] (rows=28798881 width=106)
                                                             default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_item_sk","cr_order_number","cr_return_amount","cr_net_loss"]
-                  <-Reducer 34 [CONTAINS]
+                  <-Reducer 22 [CONTAINS]
                     Reduce Output Operator [RS_122]
                       PartitionCols:_col0, _col1, _col2
                       Group By Operator [GBY_121] (rows=2435062716 width=108)
@@ -330,7 +330,7 @@ Stage-0
                           Output:["_col0","_col1","_col2","_col3","_col4"]
                           Group By Operator [GBY_117] (rows=115958879 width=135)
                             Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)"],keys:KEY._col0
-                          <-Reducer 33 [SIMPLE_EDGE]
+                          <-Reducer 21 [SIMPLE_EDGE]
                             SHUFFLE [RS_116]
                               PartitionCols:_col0
                               Group By Operator [GBY_115] (rows=231917759 width=135)
@@ -339,7 +339,7 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3"]
                                   Merge Join Operator [MERGEJOIN_218] (rows=231917759 width=135)
                                     Conds:RS_110._col2=RS_111._col0(Inner),Output:["_col5","_col6","_col9","_col10","_col18"]
-                                  <-Map 39 [SIMPLE_EDGE]
+                                  <-Map 33 [SIMPLE_EDGE]
                                     SHUFFLE [RS_111]
                                       PartitionCols:_col0
                                       Select Operator [SEL_97] (rows=84 width=1850)
@@ -348,54 +348,51 @@ Stage-0
                                           predicate:web_site_sk is not null
                                           TableScan [TS_95] (rows=84 width=1850)
                                             default@web_site,web_site,Tbl:COMPLETE,Col:NONE,Output:["web_site_sk","web_site_id"]
-                                  <-Reducer 32 [SIMPLE_EDGE]
+                                  <-Reducer 20 [SIMPLE_EDGE]
                                     SHUFFLE [RS_110]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_217] (rows=210834322 width=135)
                                         Conds:RS_107._col3=RS_108._col0(Inner),Output:["_col2","_col5","_col6","_col9","_col10"]
-                                      <-Map 38 [SIMPLE_EDGE]
+                                      <-Map 24 [SIMPLE_EDGE]
                                         SHUFFLE [RS_108]
                                           PartitionCols:_col0
                                           Select Operator [SEL_94] (rows=1150 width=1179)
                                             Output:["_col0"]
                                             Filter Operator [FIL_202] (rows=1150 width=1179)
                                               predicate:((p_channel_tv = 'N') and p_promo_sk is not null)
-                                              TableScan [TS_92] (rows=2300 width=1179)
-                                                default@promotion,promotion,Tbl:COMPLETE,Col:NONE,Output:["p_promo_sk","p_channel_tv"]
-                                      <-Reducer 31 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_12]
+                                      <-Reducer 19 [SIMPLE_EDGE]
                                         SHUFFLE [RS_107]
                                           PartitionCols:_col3
                                           Merge Join Operator [MERGEJOIN_216] (rows=191667562 width=135)
                                             Conds:RS_104._col1=RS_105._col0(Inner),Output:["_col2","_col3","_col5","_col6","_col9","_col10"]
-                                          <-Map 37 [SIMPLE_EDGE]
+                                          <-Map 23 [SIMPLE_EDGE]
                                             SHUFFLE [RS_105]
                                               PartitionCols:_col0
                                               Select Operator [SEL_91] (rows=154000 width=1436)
                                                 Output:["_col0"]
                                                 Filter Operator [FIL_201] (rows=154000 width=1436)
                                                   predicate:((i_current_price > 50) and i_item_sk is not null)
-                                                  TableScan [TS_89] (rows=462000 width=1436)
-                                                    default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_current_price"]
-                                          <-Reducer 30 [SIMPLE_EDGE]
+                                                   Please refer to the previous TableScan [TS_9]
+                                          <-Reducer 18 [SIMPLE_EDGE]
                                             SHUFFLE [RS_104]
                                               PartitionCols:_col1
                                               Merge Join Operator [MERGEJOIN_215] (rows=174243235 width=135)
                                                 Conds:RS_101._col0=RS_102._col0(Inner),Output:["_col1","_col2","_col3","_col5","_col6","_col9","_col10"]
-                                              <-Map 36 [SIMPLE_EDGE]
+                                              <-Map 12 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_102]
                                                   PartitionCols:_col0
                                                   Select Operator [SEL_88] (rows=8116 width=1119)
                                                     Output:["_col0"]
                                                     Filter Operator [FIL_200] (rows=8116 width=1119)
                                                       predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-09-03 00:00:00.0 and d_date_sk is not null)
-                                                      TableScan [TS_86] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                              <-Reducer 29 [SIMPLE_EDGE]
+                                                       Please refer to the previous TableScan [TS_6]
+                                              <-Reducer 31 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_101]
                                                   PartitionCols:_col0
                                                   Merge Join Operator [MERGEJOIN_214] (rows=158402938 width=135)
                                                     Conds:RS_98._col1, _col4=RS_99._col0, _col1(Left Outer),Output:["_col0","_col1","_col2","_col3","_col5","_col6","_col9","_col10"]
-                                                  <-Map 28 [SIMPLE_EDGE]
+                                                  <-Map 30 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_98]
                                                       PartitionCols:_col1, _col4
                                                       Select Operator [SEL_82] (rows=144002668 width=135)
@@ -404,7 +401,7 @@ Stage-0
                                                           predicate:(ws_sold_date_sk is not null and ws_web_site_sk is not null and ws_item_sk is not null and ws_promo_sk is not null)
                                                           TableScan [TS_80] (rows=144002668 width=135)
                                                             default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_web_site_sk","ws_promo_sk","ws_order_number","ws_ext_sales_price","ws_net_profit"]
-                                                  <-Map 35 [SIMPLE_EDGE]
+                                                  <-Map 32 [SIMPLE_EDGE]
                                                     SHUFFLE [RS_99]
                                                       PartitionCols:_col0, _col1
                                                       Select Operator [SEL_85] (rows=14398467 width=92)
@@ -431,7 +428,7 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3"]
                                   Merge Join Operator [MERGEJOIN_208] (rows=927646829 width=88)
                                     Conds:RS_30._col2=RS_31._col0(Inner),Output:["_col5","_col6","_col9","_col10","_col18"]
-                                  <-Map 15 [SIMPLE_EDGE]
+                                  <-Map 25 [SIMPLE_EDGE]
                                     SHUFFLE [RS_31]
                                       PartitionCols:_col0
                                       Select Operator [SEL_17] (rows=1704 width=1910)
@@ -445,29 +442,27 @@ Stage-0
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_207] (rows=843315281 width=88)
                                         Conds:RS_27._col3=RS_28._col0(Inner),Output:["_col2","_col5","_col6","_col9","_col10"]
-                                      <-Map 14 [SIMPLE_EDGE]
+                                      <-Map 24 [SIMPLE_EDGE]
                                         SHUFFLE [RS_28]
                                           PartitionCols:_col0
                                           Select Operator [SEL_14] (rows=1150 width=1179)
                                             Output:["_col0"]
                                             Filter Operator [FIL_190] (rows=1150 width=1179)
                                               predicate:((p_channel_tv = 'N') and p_promo_sk is not null)
-                                              TableScan [TS_12] (rows=2300 width=1179)
-                                                default@promotion,promotion,Tbl:COMPLETE,Col:NONE,Output:["p_promo_sk","p_channel_tv"]
+                                               Please refer to the previous TableScan [TS_12]
                                       <-Reducer 4 [SIMPLE_EDGE]
                                         SHUFFLE [RS_27]
                                           PartitionCols:_col3
                                           Merge Join Operator [MERGEJOIN_206] (rows=766650239 width=88)
                                             Conds:RS_24._col1=RS_25._col0(Inner),Output:["_col2","_col3","_col5","_col6","_col9","_col10"]
-                                          <-Map 13 [SIMPLE_EDGE]
+                                          <-Map 23 [SIMPLE_EDGE]
                                             SHUFFLE [RS_25]
                                               PartitionCols:_col0
                                               Select Operator [SEL_11] (rows=154000 width=1436)
                                                 Output:["_col0"]
                                                 Filter Operator [FIL_189] (rows=154000 width=1436)
                                                   predicate:((i_current_price > 50) and i_item_sk is not null)
-                                                  TableScan [TS_9] (rows=462000 width=1436)
-                                                    default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_current_price"]
+                                                   Please refer to the previous TableScan [TS_9]
                                           <-Reducer 3 [SIMPLE_EDGE]
                                             SHUFFLE [RS_24]
                                               PartitionCols:_col1
@@ -480,8 +475,7 @@ Stage-0
                                                     Output:["_col0"]
                                                     Filter Operator [FIL_188] (rows=8116 width=1119)
                                                       predicate:(CAST( d_date AS TIMESTAMP) BETWEEN 1998-08-04 00:00:00.0 AND 1998-09-03 00:00:00.0 and d_date_sk is not null)
-                                                      TableScan [TS_6] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
+                                                       Please refer to the previous TableScan [TS_6]
                                               <-Reducer 2 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_21]
                                                   PartitionCols:_col0

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query81.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query81.q.out b/ql/src/test/results/clientpositive/perf/query81.q.out
index 769a3d1..dfd4639 100644
--- a/ql/src/test/results/clientpositive/perf/query81.q.out
+++ b/ql/src/test/results/clientpositive/perf/query81.q.out
@@ -59,15 +59,15 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 16 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 14 <- Map 13 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE)
-Reducer 15 <- Map 18 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
-Reducer 16 <- Reducer 15 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 13 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Map 14 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+Reducer 12 <- Map 15 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 7 <- Map 11 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
-Reducer 8 <- Map 12 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 7 <- Map 14 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 15 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
@@ -95,7 +95,7 @@ Stage-0
                         PartitionCols:_col0
                         Merge Join Operator [MERGEJOIN_104] (rows=24200000 width=1014)
                           Conds:RS_51._col1=RS_52._col2(Left Outer),Output:["_col0","_col2","_col3","_col4"]
-                        <-Reducer 16 [SIMPLE_EDGE]
+                        <-Reducer 13 [SIMPLE_EDGE]
                           SHUFFLE [RS_52]
                             PartitionCols:_col2
                             Select Operator [SEL_50] (rows=11000000 width=1014)
@@ -106,45 +106,45 @@ Stage-0
                                   Output:["_col0","_col2"]
                                   Group By Operator [GBY_44] (rows=22000000 width=1014)
                                     Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                                  <-Reducer 15 [SIMPLE_EDGE]
+                                  <-Reducer 12 [SIMPLE_EDGE]
                                     SHUFFLE [RS_43]
                                       PartitionCols:_col0
                                       Group By Operator [GBY_42] (rows=44000000 width=1014)
                                         Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col7, _col1
                                         Merge Join Operator [MERGEJOIN_103] (rows=44000000 width=1014)
                                           Conds:RS_38._col2=RS_39._col0(Inner),Output:["_col1","_col3","_col7"]
-                                        <-Map 18 [SIMPLE_EDGE]
+                                        <-Map 15 [SIMPLE_EDGE]
                                           SHUFFLE [RS_39]
                                             PartitionCols:_col0
                                             Select Operator [SEL_34] (rows=40000000 width=1014)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_98] (rows=40000000 width=1014)
                                                 predicate:(ca_address_sk is not null and ca_state is not null)
-                                                TableScan [TS_32] (rows=40000000 width=1014)
+                                                TableScan [TS_12] (rows=40000000 width=1014)
                                                   default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
-                                        <-Reducer 14 [SIMPLE_EDGE]
+                                        <-Reducer 11 [SIMPLE_EDGE]
                                           SHUFFLE [RS_38]
                                             PartitionCols:_col2
                                             Merge Join Operator [MERGEJOIN_102] (rows=31678769 width=106)
                                               Conds:RS_35._col0=RS_36._col0(Inner),Output:["_col1","_col2","_col3"]
-                                            <-Map 13 [SIMPLE_EDGE]
-                                              SHUFFLE [RS_35]
-                                                PartitionCols:_col0
-                                                Select Operator [SEL_28] (rows=28798881 width=106)
-                                                  Output:["_col0","_col1","_col2","_col3"]
-                                                  Filter Operator [FIL_96] (rows=28798881 width=106)
-                                                    predicate:(cr_returned_date_sk is not null and cr_returning_addr_sk is not null)
-                                                    TableScan [TS_26] (rows=28798881 width=106)
-                                                      default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_returned_date_sk","cr_returning_customer_sk","cr_returning_addr_sk","cr_return_amt_inc_tax"]
-                                            <-Map 17 [SIMPLE_EDGE]
+                                            <-Map 14 [SIMPLE_EDGE]
                                               SHUFFLE [RS_36]
                                                 PartitionCols:_col0
                                                 Select Operator [SEL_31] (rows=36524 width=1119)
                                                   Output:["_col0"]
                                                   Filter Operator [FIL_97] (rows=36524 width=1119)
                                                     predicate:((d_year = 1998) and d_date_sk is not null)
-                                                    TableScan [TS_29] (rows=73049 width=1119)
+                                                    TableScan [TS_9] (rows=73049 width=1119)
                                                       default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                            <-Map 6 [SIMPLE_EDGE]
+                                              SHUFFLE [RS_35]
+                                                PartitionCols:_col0
+                                                Select Operator [SEL_28] (rows=28798881 width=106)
+                                                  Output:["_col0","_col1","_col2","_col3"]
+                                                  Filter Operator [FIL_96] (rows=28798881 width=106)
+                                                    predicate:(cr_returned_date_sk is not null and cr_returning_addr_sk is not null)
+                                                    TableScan [TS_6] (rows=28798881 width=106)
+                                                      default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_returned_date_sk","cr_returning_customer_sk","cr_returning_addr_sk","cr_return_amt_inc_tax"]
                         <-Reducer 9 [SIMPLE_EDGE]
                           SHUFFLE [RS_51]
                             PartitionCols:_col1
@@ -159,29 +159,27 @@ Stage-0
                                     Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col7, _col1
                                     Merge Join Operator [MERGEJOIN_101] (rows=44000000 width=1014)
                                       Conds:RS_18._col2=RS_19._col0(Inner),Output:["_col1","_col3","_col7"]
-                                    <-Map 12 [SIMPLE_EDGE]
+                                    <-Map 15 [SIMPLE_EDGE]
                                       SHUFFLE [RS_19]
                                         PartitionCols:_col0
                                         Select Operator [SEL_14] (rows=40000000 width=1014)
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_95] (rows=40000000 width=1014)
                                             predicate:ca_address_sk is not null
-                                            TableScan [TS_12] (rows=40000000 width=1014)
-                                              default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_state"]
+                                             Please refer to the previous TableScan [TS_12]
                                     <-Reducer 7 [SIMPLE_EDGE]
                                       SHUFFLE [RS_18]
                                         PartitionCols:_col2
                                         Merge Join Operator [MERGEJOIN_100] (rows=31678769 width=106)
                                           Conds:RS_15._col0=RS_16._col0(Inner),Output:["_col1","_col2","_col3"]
-                                        <-Map 11 [SIMPLE_EDGE]
+                                        <-Map 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_16]
                                             PartitionCols:_col0
                                             Select Operator [SEL_11] (rows=36524 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_94] (rows=36524 width=1119)
                                                 predicate:((d_year = 1998) and d_date_sk is not null)
-                                                TableScan [TS_9] (rows=73049 width=1119)
-                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                                                 Please refer to the previous TableScan [TS_9]
                                         <-Map 6 [SIMPLE_EDGE]
                                           SHUFFLE [RS_15]
                                             PartitionCols:_col0
@@ -189,8 +187,7 @@ Stage-0
                                               Output:["_col0","_col1","_col2","_col3"]
                                               Filter Operator [FIL_93] (rows=28798881 width=106)
                                                 predicate:(cr_returned_date_sk is not null and cr_returning_addr_sk is not null and cr_returning_customer_sk is not null)
-                                                TableScan [TS_6] (rows=28798881 width=106)
-                                                  default@catalog_returns,catalog_returns,Tbl:COMPLETE,Col:NONE,Output:["cr_returned_date_sk","cr_returning_customer_sk","cr_returning_addr_sk","cr_return_amt_inc_tax"]
+                                                 Please refer to the previous TableScan [TS_6]
                     <-Reducer 2 [SIMPLE_EDGE]
                       SHUFFLE [RS_58]
                         PartitionCols:_col0


[26/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
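
The golden-file diffs that follow show the effect of this change: when a query scans the same table more than once, the Tez plan may now keep a single TableScan operator, and later branches point back to it, which the EXPLAIN output renders as "Please refer to the previous TableScan [TS_x]". A minimal HiveQL sketch of a query shape that would exercise this (aliases and the predicate are illustrative only, not taken from these tests):

    EXPLAIN
    SELECT d1.key, d2.value
    FROM src d1
    JOIN src d2 ON (d1.key = d2.key)
    WHERE d2.value IN ('2000Q1', '2000Q2');
    -- With shared scans enabled, the Tez plan is expected to contain one
    -- TableScan over src; the second branch references that scan instead of
    -- emitting a duplicate TableScan operator.
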
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/explainuser_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_2.q.out b/ql/src/test/results/clientpositive/llap/explainuser_2.q.out
index 2a27479..e3f70b0 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_2.q.out
@@ -288,15 +288,15 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 14 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Reducer 10 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE)
+Reducer 10 <- Map 15 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-Reducer 8 <- Map 12 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
-Reducer 9 <- Map 13 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 7 <- Map 6 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 8 <- Map 14 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 10 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -319,7 +319,30 @@ Stage-0
                     Output:["_col0","_col1","_col2","_col3","_col4","_col5"],aggregations:["count(_col13)","count(_col21)","count(_col3)"],keys:_col12, _col20, _col2
                     Merge Join Operator [MERGEJOIN_97] (rows=363 width=534)
                       Conds:RS_44._col1, _col3=RS_45._col15, _col17(Inner),Output:["_col2","_col3","_col12","_col13","_col20","_col21"]
-                    <-Reducer 11 [SIMPLE_EDGE] llap
+                    <-Reducer 2 [SIMPLE_EDGE] llap
+                      SHUFFLE [RS_44]
+                        PartitionCols:_col1, _col3
+                        Merge Join Operator [MERGEJOIN_91] (rows=99 width=269)
+                          Conds:RS_41._col0=RS_42._col0(Inner),Output:["_col1","_col2","_col3"]
+                        <-Map 6 [SIMPLE_EDGE] llap
+                          SHUFFLE [RS_42]
+                            PartitionCols:_col0
+                            Select Operator [SEL_5] (rows=7 width=178)
+                              Output:["_col0"]
+                              Filter Operator [FIL_84] (rows=7 width=178)
+                                predicate:((value) IN ('2000Q1', '2000Q2', '2000Q3') and key is not null)
+                                TableScan [TS_3] (rows=500 width=178)
+                                  default@src,d3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                        <-Map 1 [SIMPLE_EDGE] llap
+                          SHUFFLE [RS_41]
+                            PartitionCols:_col0
+                            Select Operator [SEL_2] (rows=170 width=356)
+                              Output:["_col0","_col1","_col2","_col3"]
+                              Filter Operator [FIL_83] (rows=170 width=356)
+                                predicate:(v2 is not null and v3 is not null and k1 is not null)
+                                TableScan [TS_0] (rows=170 width=356)
+                                  default@cs,cs,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v2","k3","v3"]
+                    <-Reducer 9 [SIMPLE_EDGE] llap
                       SHUFFLE [RS_45]
                         PartitionCols:_col15, _col17
                         Select Operator [SEL_40] (rows=180 width=447)
@@ -327,6 +350,28 @@ Stage-0
                           Merge Join Operator [MERGEJOIN_96] (rows=180 width=447)
                             Conds:RS_37._col6, _col4=RS_38._col4, _col2(Inner),Output:["_col2","_col3","_col14","_col15","_col17"]
                           <-Reducer 10 [SIMPLE_EDGE] llap
+                            SHUFFLE [RS_38]
+                              PartitionCols:_col4, _col2
+                              Merge Join Operator [MERGEJOIN_95] (rows=18 width=356)
+                                Conds:RS_24._col0=RS_25._col0(Inner),Output:["_col2","_col3","_col4","_col5"]
+                              <-Map 6 [SIMPLE_EDGE] llap
+                                SHUFFLE [RS_25]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_23] (rows=7 width=178)
+                                    Output:["_col0"]
+                                    Filter Operator [FIL_90] (rows=7 width=178)
+                                      predicate:((value) IN ('2000Q1', '2000Q2', '2000Q3') and key is not null)
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Map 15 [SIMPLE_EDGE] llap
+                                SHUFFLE [RS_24]
+                                  PartitionCols:_col0
+                                  Select Operator [SEL_20] (rows=8 width=531)
+                                    Output:["_col0","_col2","_col3","_col4","_col5"]
+                                    Filter Operator [FIL_89] (rows=8 width=534)
+                                      predicate:((v1 = 'srv1') and k2 is not null and k3 is not null and v2 is not null and v3 is not null and k1 is not null)
+                                      TableScan [TS_18] (rows=85 width=534)
+                                        default@sr,sr,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v1","k2","v2","k3","v3"]
+                          <-Reducer 8 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_37]
                               PartitionCols:_col6, _col4
                               Merge Join Operator [MERGEJOIN_94] (rows=40 width=352)
@@ -340,35 +385,25 @@ Stage-0
                                       predicate:((key = 'src1key') and value is not null)
                                       TableScan [TS_15] (rows=25 width=175)
                                         default@src1,src1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                              <-Reducer 9 [SIMPLE_EDGE] llap
+                              <-Reducer 7 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_34]
                                   PartitionCols:_col3
                                   Merge Join Operator [MERGEJOIN_93] (rows=40 width=352)
                                     Conds:RS_31._col2=RS_32._col0(Inner),Output:["_col2","_col3","_col4","_col6"]
-                                  <-Map 13 [SIMPLE_EDGE] llap
+                                  <-Map 6 [SIMPLE_EDGE] llap
                                     SHUFFLE [RS_32]
                                       PartitionCols:_col0
                                       Select Operator [SEL_14] (rows=2 width=178)
                                         Output:["_col0"]
                                         Filter Operator [FIL_87] (rows=2 width=178)
                                           predicate:((value = 'd1value') and key is not null)
-                                          TableScan [TS_12] (rows=500 width=178)
-                                            default@src,d1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                                  <-Reducer 8 [SIMPLE_EDGE] llap
+                                           Please refer to the previous TableScan [TS_3]
+                                  <-Reducer 12 [SIMPLE_EDGE] llap
                                     SHUFFLE [RS_31]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_92] (rows=40 width=352)
                                         Conds:RS_28._col1=RS_29._col3(Inner),Output:["_col2","_col3","_col4","_col6"]
-                                      <-Map 12 [SIMPLE_EDGE] llap
-                                        SHUFFLE [RS_29]
-                                          PartitionCols:_col3
-                                          Select Operator [SEL_11] (rows=8 width=531)
-                                            Output:["_col0","_col1","_col2","_col3","_col4"]
-                                            Filter Operator [FIL_86] (rows=8 width=534)
-                                              predicate:((v3 = 'ssv3') and k2 is not null and k3 is not null and k1 is not null and v1 is not null and v2 is not null)
-                                              TableScan [TS_9] (rows=85 width=534)
-                                                default@ss,ss,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v1","k2","v2","k3","v3"]
-                                      <-Map 7 [SIMPLE_EDGE] llap
+                                      <-Map 11 [SIMPLE_EDGE] llap
                                         SHUFFLE [RS_28]
                                           PartitionCols:_col1
                                           Select Operator [SEL_8] (rows=10 width=185)
@@ -377,52 +412,15 @@ Stage-0
                                               predicate:((key = 'srcpartkey') and value is not null)
                                               TableScan [TS_6] (rows=2000 width=178)
                                                 default@srcpart,srcpart,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                          <-Reducer 16 [SIMPLE_EDGE] llap
-                            SHUFFLE [RS_38]
-                              PartitionCols:_col4, _col2
-                              Merge Join Operator [MERGEJOIN_95] (rows=18 width=356)
-                                Conds:RS_24._col0=RS_25._col0(Inner),Output:["_col2","_col3","_col4","_col5"]
-                              <-Map 15 [SIMPLE_EDGE] llap
-                                SHUFFLE [RS_24]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_20] (rows=8 width=531)
-                                    Output:["_col0","_col2","_col3","_col4","_col5"]
-                                    Filter Operator [FIL_89] (rows=8 width=534)
-                                      predicate:((v1 = 'srv1') and k2 is not null and k3 is not null and v2 is not null and v3 is not null and k1 is not null)
-                                      TableScan [TS_18] (rows=85 width=534)
-                                        default@sr,sr,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v1","k2","v2","k3","v3"]
-                              <-Map 17 [SIMPLE_EDGE] llap
-                                SHUFFLE [RS_25]
-                                  PartitionCols:_col0
-                                  Select Operator [SEL_23] (rows=7 width=178)
-                                    Output:["_col0"]
-                                    Filter Operator [FIL_90] (rows=7 width=178)
-                                      predicate:((value) IN ('2000Q1', '2000Q2', '2000Q3') and key is not null)
-                                      TableScan [TS_21] (rows=500 width=178)
-                                        default@src,d2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                    <-Reducer 2 [SIMPLE_EDGE] llap
-                      SHUFFLE [RS_44]
-                        PartitionCols:_col1, _col3
-                        Merge Join Operator [MERGEJOIN_91] (rows=99 width=269)
-                          Conds:RS_41._col0=RS_42._col0(Inner),Output:["_col1","_col2","_col3"]
-                        <-Map 1 [SIMPLE_EDGE] llap
-                          SHUFFLE [RS_41]
-                            PartitionCols:_col0
-                            Select Operator [SEL_2] (rows=170 width=356)
-                              Output:["_col0","_col1","_col2","_col3"]
-                              Filter Operator [FIL_83] (rows=170 width=356)
-                                predicate:(v2 is not null and v3 is not null and k1 is not null)
-                                TableScan [TS_0] (rows=170 width=356)
-                                  default@cs,cs,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v2","k3","v3"]
-                        <-Map 6 [SIMPLE_EDGE] llap
-                          SHUFFLE [RS_42]
-                            PartitionCols:_col0
-                            Select Operator [SEL_5] (rows=7 width=178)
-                              Output:["_col0"]
-                              Filter Operator [FIL_84] (rows=7 width=178)
-                                predicate:((value) IN ('2000Q1', '2000Q2', '2000Q3') and key is not null)
-                                TableScan [TS_3] (rows=500 width=178)
-                                  default@src,d3,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                      <-Map 13 [SIMPLE_EDGE] llap
+                                        SHUFFLE [RS_29]
+                                          PartitionCols:_col3
+                                          Select Operator [SEL_11] (rows=8 width=531)
+                                            Output:["_col0","_col1","_col2","_col3","_col4"]
+                                            Filter Operator [FIL_86] (rows=8 width=534)
+                                              predicate:((v3 = 'ssv3') and k2 is not null and k3 is not null and k1 is not null and v1 is not null and v2 is not null)
+                                              TableScan [TS_9] (rows=85 width=534)
+                                                default@ss,ss,Tbl:COMPLETE,Col:COMPLETE,Output:["k1","v1","k2","v2","k3","v3"]
 
 PREHOOK: query: explain
 SELECT x.key, z.value, y.value
@@ -446,15 +444,15 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 11 <- Union 12 (CONTAINS)
-Map 16 <- Union 12 (CONTAINS)
+Map 13 <- Union 14 (CONTAINS)
+Map 16 <- Union 14 (CONTAINS)
 Map 8 <- Union 2 (CONTAINS)
-Reducer 13 <- Union 12 (SIMPLE_EDGE)
-Reducer 14 <- Map 17 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
-Reducer 15 <- Map 18 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 10 <- Map 9 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 11 <- Map 17 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 15 <- Union 14 (SIMPLE_EDGE)
 Reducer 3 <- Union 2 (SIMPLE_EDGE)
 Reducer 4 <- Map 9 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 10 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE)
 
 Stage-0
@@ -466,7 +464,7 @@ Stage-0
         Group By Operator [GBY_57] (rows=28 width=177)
           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
         <-Union 6 [SIMPLE_EDGE]
-          <-Reducer 15 [CONTAINS] llap
+          <-Reducer 11 [CONTAINS] llap
             Reduce Output Operator [RS_56]
               PartitionCols:_col0, _col1
               Group By Operator [GBY_55] (rows=28 width=177)
@@ -475,7 +473,7 @@ Stage-0
                   Output:["_col0","_col1"]
                   Merge Join Operator [MERGEJOIN_85] (rows=73 width=177)
                     Conds:RS_48._col2=RS_49._col0(Inner),Output:["_col1","_col2"]
-                  <-Map 18 [SIMPLE_EDGE] llap
+                  <-Map 17 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_49]
                       PartitionCols:_col0
                       Select Operator [SEL_44] (rows=500 width=87)
@@ -484,29 +482,29 @@ Stage-0
                           predicate:key is not null
                           TableScan [TS_42] (rows=500 width=87)
                             default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
-                  <-Reducer 14 [SIMPLE_EDGE] llap
+                  <-Reducer 10 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_48]
                       PartitionCols:_col2
                       Merge Join Operator [MERGEJOIN_84] (rows=30 width=177)
                         Conds:RS_45._col1=RS_46._col1(Inner),Output:["_col1","_col2"]
-                      <-Map 17 [SIMPLE_EDGE] llap
+                      <-Map 9 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_46]
                           PartitionCols:_col1
                           Select Operator [SEL_41] (rows=25 width=175)
                             Output:["_col0","_col1"]
                             Filter Operator [FIL_80] (rows=25 width=175)
                               predicate:(key is not null and value is not null)
-                              TableScan [TS_39] (rows=25 width=175)
+                              TableScan [TS_13] (rows=25 width=175)
                                 default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                      <-Reducer 13 [SIMPLE_EDGE] llap
+                      <-Reducer 15 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_45]
                           PartitionCols:_col1
                           Select Operator [SEL_38] (rows=262 width=178)
                             Output:["_col1"]
                             Group By Operator [GBY_37] (rows=262 width=178)
                               Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                            <-Union 12 [SIMPLE_EDGE]
-                              <-Map 11 [CONTAINS] llap
+                            <-Union 14 [SIMPLE_EDGE]
+                              <-Map 13 [CONTAINS] llap
                                 Reduce Output Operator [RS_36]
                                   PartitionCols:_col0, _col1
                                   Group By Operator [GBY_35] (rows=262 width=178)
@@ -537,7 +535,7 @@ Stage-0
                   Output:["_col0","_col1"]
                   Merge Join Operator [MERGEJOIN_83] (rows=73 width=177)
                     Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col2"]
-                  <-Map 10 [SIMPLE_EDGE] llap
+                  <-Map 12 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_23]
                       PartitionCols:_col0
                       Select Operator [SEL_18] (rows=500 width=87)
@@ -558,8 +556,7 @@ Stage-0
                             Output:["_col0","_col1"]
                             Filter Operator [FIL_76] (rows=25 width=175)
                               predicate:(key is not null and value is not null)
-                              TableScan [TS_13] (rows=25 width=175)
-                                default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                               Please refer to the previous TableScan [TS_13]
                       <-Reducer 3 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_19]
                           PartitionCols:_col1
@@ -622,25 +619,25 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
 Map 10 <- Union 2 (CONTAINS)
-Map 13 <- Union 14 (CONTAINS)
-Map 20 <- Union 14 (CONTAINS)
-Map 21 <- Union 16 (CONTAINS)
-Map 24 <- Union 25 (CONTAINS)
-Map 33 <- Union 25 (CONTAINS)
-Map 34 <- Union 27 (CONTAINS)
-Map 35 <- Union 29 (CONTAINS)
-Reducer 15 <- Union 14 (SIMPLE_EDGE), Union 16 (CONTAINS)
-Reducer 17 <- Union 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 22 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Map 23 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE), Union 6 (CONTAINS)
-Reducer 26 <- Union 25 (SIMPLE_EDGE), Union 27 (CONTAINS)
-Reducer 28 <- Union 27 (SIMPLE_EDGE), Union 29 (CONTAINS)
+Map 17 <- Union 18 (CONTAINS)
+Map 22 <- Union 18 (CONTAINS)
+Map 23 <- Union 20 (CONTAINS)
+Map 25 <- Union 26 (CONTAINS)
+Map 32 <- Union 26 (CONTAINS)
+Map 33 <- Union 28 (CONTAINS)
+Map 34 <- Union 30 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Reducer 21 (SIMPLE_EDGE)
+Reducer 13 <- Map 24 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 14 <- Map 11 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
+Reducer 15 <- Map 16 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 19 <- Union 18 (SIMPLE_EDGE), Union 20 (CONTAINS)
+Reducer 21 <- Union 20 (SIMPLE_EDGE)
+Reducer 27 <- Union 26 (SIMPLE_EDGE), Union 28 (CONTAINS)
+Reducer 29 <- Union 28 (SIMPLE_EDGE), Union 30 (CONTAINS)
 Reducer 3 <- Union 2 (SIMPLE_EDGE)
-Reducer 30 <- Union 29 (SIMPLE_EDGE)
-Reducer 31 <- Map 36 (SIMPLE_EDGE), Reducer 30 (SIMPLE_EDGE)
-Reducer 32 <- Map 37 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE), Union 8 (CONTAINS)
+Reducer 31 <- Union 30 (SIMPLE_EDGE)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 5 <- Map 16 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE), Union 8 (CONTAINS)
 Reducer 9 <- Union 8 (SIMPLE_EDGE)
 
@@ -653,7 +650,7 @@ Stage-0
         Group By Operator [GBY_120] (rows=107 width=177)
           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
         <-Union 8 [SIMPLE_EDGE]
-          <-Reducer 32 [CONTAINS] llap
+          <-Reducer 15 [CONTAINS] llap
             Reduce Output Operator [RS_119]
               PartitionCols:_col0, _col1
               Group By Operator [GBY_118] (rows=107 width=177)
@@ -662,38 +659,38 @@ Stage-0
                   Output:["_col0","_col1"]
                   Merge Join Operator [MERGEJOIN_170] (rows=124 width=177)
                     Conds:RS_111._col2=RS_112._col0(Inner),Output:["_col2","_col5"]
-                  <-Map 37 [SIMPLE_EDGE] llap
+                  <-Map 16 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_112]
                       PartitionCols:_col0
                       Select Operator [SEL_107] (rows=500 width=178)
                         Output:["_col0","_col1"]
                         Filter Operator [FIL_164] (rows=500 width=178)
                           predicate:key is not null
-                          TableScan [TS_105] (rows=500 width=178)
+                          TableScan [TS_16] (rows=500 width=178)
                             default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                  <-Reducer 31 [SIMPLE_EDGE] llap
+                  <-Reducer 14 [SIMPLE_EDGE] llap
                     SHUFFLE [RS_111]
                       PartitionCols:_col2
                       Merge Join Operator [MERGEJOIN_169] (rows=51 width=86)
                         Conds:RS_108._col1=RS_109._col1(Inner),Output:["_col2"]
-                      <-Map 36 [SIMPLE_EDGE] llap
+                      <-Map 11 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_109]
                           PartitionCols:_col1
                           Select Operator [SEL_104] (rows=25 width=175)
                             Output:["_col0","_col1"]
                             Filter Operator [FIL_163] (rows=25 width=175)
                               predicate:(key is not null and value is not null)
-                              TableScan [TS_102] (rows=25 width=175)
+                              TableScan [TS_13] (rows=25 width=175)
                                 default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                      <-Reducer 30 [SIMPLE_EDGE] llap
+                      <-Reducer 31 [SIMPLE_EDGE] llap
                         SHUFFLE [RS_108]
                           PartitionCols:_col1
                           Select Operator [SEL_101] (rows=440 width=178)
                             Output:["_col1"]
                             Group By Operator [GBY_100] (rows=440 width=178)
                               Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                            <-Union 29 [SIMPLE_EDGE]
-                              <-Map 35 [CONTAINS] llap
+                            <-Union 30 [SIMPLE_EDGE]
+                              <-Map 34 [CONTAINS] llap
                                 Reduce Output Operator [RS_99]
                                   PartitionCols:_col0, _col1
                                   Group By Operator [GBY_98] (rows=440 width=178)
@@ -704,7 +701,7 @@ Stage-0
                                         predicate:value is not null
                                         TableScan [TS_92] (rows=500 width=178)
                                           Output:["key","value"]
-                              <-Reducer 28 [CONTAINS] llap
+                              <-Reducer 29 [CONTAINS] llap
                                 Reduce Output Operator [RS_99]
                                   PartitionCols:_col0, _col1
                                   Group By Operator [GBY_98] (rows=440 width=178)
@@ -713,8 +710,8 @@ Stage-0
                                       Output:["_col0","_col1"]
                                       Group By Operator [GBY_90] (rows=381 width=178)
                                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                      <-Union 27 [SIMPLE_EDGE]
-                                        <-Map 34 [CONTAINS] llap
+                                      <-Union 28 [SIMPLE_EDGE]
+                                        <-Map 33 [CONTAINS] llap
                                           Reduce Output Operator [RS_89]
                                             PartitionCols:_col0, _col1
                                             Group By Operator [GBY_88] (rows=381 width=178)
@@ -725,7 +722,7 @@ Stage-0
                                                   predicate:value is not null
                                                   TableScan [TS_82] (rows=500 width=178)
                                                     Output:["key","value"]
-                                        <-Reducer 26 [CONTAINS] llap
+                                        <-Reducer 27 [CONTAINS] llap
                                           Reduce Output Operator [RS_89]
                                             PartitionCols:_col0, _col1
                                             Group By Operator [GBY_88] (rows=381 width=178)
@@ -734,8 +731,8 @@ Stage-0
                                                 Output:["_col0","_col1"]
                                                 Group By Operator [GBY_80] (rows=262 width=178)
                                                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                                <-Union 25 [SIMPLE_EDGE]
-                                                  <-Map 24 [CONTAINS] llap
+                                                <-Union 26 [SIMPLE_EDGE]
+                                                  <-Map 25 [CONTAINS] llap
                                                     Reduce Output Operator [RS_79]
                                                       PartitionCols:_col0, _col1
                                                       Group By Operator [GBY_78] (rows=262 width=178)
@@ -746,7 +743,7 @@ Stage-0
                                                             predicate:value is not null
                                                             TableScan [TS_69] (rows=25 width=175)
                                                               Output:["key","value"]
-                                                  <-Map 33 [CONTAINS] llap
+                                                  <-Map 32 [CONTAINS] llap
                                                     Reduce Output Operator [RS_79]
                                                       PartitionCols:_col0, _col1
                                                       Group By Operator [GBY_78] (rows=262 width=178)
@@ -765,7 +762,7 @@ Stage-0
                 Group By Operator [GBY_67] (rows=90 width=177)
                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                 <-Union 6 [SIMPLE_EDGE]
-                  <-Reducer 19 [CONTAINS] llap
+                  <-Reducer 13 [CONTAINS] llap
                     Reduce Output Operator [RS_66]
                       PartitionCols:_col0, _col1
                       Group By Operator [GBY_65] (rows=90 width=177)
@@ -774,7 +771,7 @@ Stage-0
                           Output:["_col0","_col1"]
                           Merge Join Operator [MERGEJOIN_168] (rows=107 width=177)
                             Conds:RS_58._col2=RS_59._col0(Inner),Output:["_col2","_col5"]
-                          <-Map 23 [SIMPLE_EDGE] llap
+                          <-Map 24 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_59]
                               PartitionCols:_col0
                               Select Operator [SEL_54] (rows=500 width=178)
@@ -783,29 +780,28 @@ Stage-0
                                   predicate:key is not null
                                   TableScan [TS_52] (rows=500 width=178)
                                     default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                          <-Reducer 18 [SIMPLE_EDGE] llap
+                          <-Reducer 12 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_58]
                               PartitionCols:_col2
                               Merge Join Operator [MERGEJOIN_167] (rows=44 width=86)
                                 Conds:RS_55._col1=RS_56._col1(Inner),Output:["_col2"]
-                              <-Map 22 [SIMPLE_EDGE] llap
+                              <-Map 11 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_56]
                                   PartitionCols:_col1
                                   Select Operator [SEL_51] (rows=25 width=175)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_157] (rows=25 width=175)
                                       predicate:(key is not null and value is not null)
-                                      TableScan [TS_49] (rows=25 width=175)
-                                        default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
-                              <-Reducer 17 [SIMPLE_EDGE] llap
+                                       Please refer to the previous TableScan [TS_13]
+                              <-Reducer 21 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_55]
                                   PartitionCols:_col1
                                   Select Operator [SEL_48] (rows=381 width=178)
                                     Output:["_col1"]
                                     Group By Operator [GBY_47] (rows=381 width=178)
                                       Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                    <-Union 16 [SIMPLE_EDGE]
-                                      <-Map 21 [CONTAINS] llap
+                                    <-Union 20 [SIMPLE_EDGE]
+                                      <-Map 23 [CONTAINS] llap
                                         Reduce Output Operator [RS_46]
                                           PartitionCols:_col0, _col1
                                           Group By Operator [GBY_45] (rows=381 width=178)
@@ -816,7 +812,7 @@ Stage-0
                                                 predicate:value is not null
                                                 TableScan [TS_39] (rows=500 width=178)
                                                   Output:["key","value"]
-                                      <-Reducer 15 [CONTAINS] llap
+                                      <-Reducer 19 [CONTAINS] llap
                                         Reduce Output Operator [RS_46]
                                           PartitionCols:_col0, _col1
                                           Group By Operator [GBY_45] (rows=381 width=178)
@@ -825,8 +821,8 @@ Stage-0
                                               Output:["_col0","_col1"]
                                               Group By Operator [GBY_37] (rows=262 width=178)
                                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                              <-Union 14 [SIMPLE_EDGE]
-                                                <-Map 13 [CONTAINS] llap
+                                              <-Union 18 [SIMPLE_EDGE]
+                                                <-Map 17 [CONTAINS] llap
                                                   Reduce Output Operator [RS_36]
                                                     PartitionCols:_col0, _col1
                                                     Group By Operator [GBY_35] (rows=262 width=178)
@@ -837,7 +833,7 @@ Stage-0
                                                           predicate:value is not null
                                                           TableScan [TS_26] (rows=25 width=175)
                                                             Output:["key","value"]
-                                                <-Map 20 [CONTAINS] llap
+                                                <-Map 22 [CONTAINS] llap
                                                   Reduce Output Operator [RS_36]
                                                     PartitionCols:_col0, _col1
                                                     Group By Operator [GBY_35] (rows=262 width=178)
@@ -857,15 +853,14 @@ Stage-0
                           Output:["_col0","_col1"]
                           Merge Join Operator [MERGEJOIN_166] (rows=73 width=177)
                             Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col2","_col5"]
-                          <-Map 12 [SIMPLE_EDGE] llap
+                          <-Map 16 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_23]
                               PartitionCols:_col0
                               Select Operator [SEL_18] (rows=500 width=178)
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_153] (rows=500 width=178)
                                   predicate:key is not null
-                                  TableScan [TS_16] (rows=500 width=178)
-                                    default@src,y,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                   Please refer to the previous TableScan [TS_16]
                           <-Reducer 4 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_22]
                               PartitionCols:_col2
@@ -878,8 +873,7 @@ Stage-0
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_152] (rows=25 width=175)
                                       predicate:(key is not null and value is not null)
-                                      TableScan [TS_13] (rows=25 width=175)
-                                        default@src1,x,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                                       Please refer to the previous TableScan [TS_13]
                               <-Reducer 3 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_19]
                                   PartitionCols:_col1
@@ -1326,7 +1320,7 @@ Reducer 13 <- Union 12 (SIMPLE_EDGE), Union 14 (CONTAINS)
 Reducer 15 <- Map 18 (BROADCAST_EDGE), Map 19 (BROADCAST_EDGE), Union 14 (SIMPLE_EDGE), Union 4 (CONTAINS)
 Reducer 22 <- Union 21 (SIMPLE_EDGE), Union 23 (CONTAINS)
 Reducer 24 <- Union 23 (SIMPLE_EDGE), Union 25 (CONTAINS)
-Reducer 26 <- Map 30 (BROADCAST_EDGE), Map 31 (BROADCAST_EDGE), Union 25 (SIMPLE_EDGE), Union 6 (CONTAINS)
+Reducer 26 <- Map 10 (BROADCAST_EDGE), Map 9 (BROADCAST_EDGE), Union 25 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 3 <- Map 10 (BROADCAST_EDGE), Map 9 (BROADCAST_EDGE), Union 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
 Reducer 5 <- Union 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
 Reducer 7 <- Union 6 (SIMPLE_EDGE)
@@ -1349,25 +1343,25 @@ Stage-0
                   Output:["_col0","_col1"]
                   Map Join Operator [MAPJOIN_170] (rows=550 width=10)
                     Conds:MAPJOIN_169._col2=RS_112._col0(Inner),Output:["_col2","_col5"]
-                  <-Map 31 [BROADCAST_EDGE] llap
+                  <-Map 10 [BROADCAST_EDGE] llap
                     BROADCAST [RS_112]
                       PartitionCols:_col0
                       Select Operator [SEL_107] (rows=500 width=10)
                         Output:["_col0","_col1"]
                         Filter Operator [FIL_164] (rows=500 width=10)
                           predicate:key is not null
-                          TableScan [TS_105] (rows=500 width=10)
+                          TableScan [TS_16] (rows=500 width=10)
                             default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
                   <-Map Join Operator [MAPJOIN_169] (rows=484 width=10)
                       Conds:SEL_101._col1=RS_109._col1(Inner),Output:["_col2"]
-                    <-Map 30 [BROADCAST_EDGE] llap
+                    <-Map 9 [BROADCAST_EDGE] llap
                       BROADCAST [RS_109]
                         PartitionCols:_col1
                         Select Operator [SEL_104] (rows=25 width=7)
                           Output:["_col0","_col1"]
                           Filter Operator [FIL_163] (rows=25 width=7)
                             predicate:(key is not null and value is not null)
-                            TableScan [TS_102] (rows=25 width=7)
+                            TableScan [TS_13] (rows=25 width=7)
                               default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
                     <-Select Operator [SEL_101] (rows=440 width=10)
                         Output:["_col1"]
@@ -1539,8 +1533,7 @@ Stage-0
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_153] (rows=500 width=10)
                                   predicate:key is not null
-                                  TableScan [TS_16] (rows=500 width=10)
-                                    default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                                   Please refer to the previous TableScan [TS_16]
                           <-Map Join Operator [MAPJOIN_165] (rows=288 width=10)
                               Conds:SEL_12._col1=RS_20._col1(Inner),Output:["_col2"]
                             <-Map 9 [BROADCAST_EDGE] llap
@@ -1550,8 +1543,7 @@ Stage-0
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_152] (rows=25 width=7)
                                     predicate:(key is not null and value is not null)
-                                    TableScan [TS_13] (rows=25 width=7)
-                                      default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                                     Please refer to the previous TableScan [TS_13]
                             <-Select Operator [SEL_12] (rows=262 width=10)
                                 Output:["_col1"]
                                 Group By Operator [GBY_11] (rows=262 width=10)
@@ -2048,36 +2040,36 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Map 6 (BROADCAST_EDGE), Union 2 (CONTAINS)
-Map 12 <- Union 9 (CONTAINS)
-Map 13 <- Union 9 (CONTAINS)
-Map 16 <- Map 17 (BROADCAST_EDGE)
-Map 18 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 19 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 20 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 21 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 10 <- Union 11 (CONTAINS)
+Map 12 <- Union 11 (CONTAINS)
+Map 13 <- Union 11 (CONTAINS)
+Map 16 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 17 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 18 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 19 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
 Map 5 <- Map 6 (BROADCAST_EDGE), Union 2 (CONTAINS)
-Map 8 <- Union 9 (CONTAINS)
-Reducer 10 <- Map 14 (SIMPLE_EDGE), Union 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Map 6 <- Map 15 (BROADCAST_EDGE)
 Reducer 3 <- Map 7 (SIMPLE_EDGE), Union 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Reducer 8 <- Map 7 (SIMPLE_EDGE), Union 11 (SIMPLE_EDGE)
+Reducer 9 <- Map 14 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE), Union 4 (CONTAINS)
 
 Stage-0
   Fetch Operator
     limit:-1
     Stage-1
       Union 4
-      <-Map 18 [CONTAINS] llap
+      <-Map 16 [CONTAINS] llap
         File Output Operator [FS_78]
           Select Operator [SEL_76] (rows=1677 width=10)
             Output:["_col0","_col1"]
             Map Join Operator [MAPJOIN_123] (rows=1677 width=10)
               Conds:RS_73._col1=SEL_56._col0(Inner),Output:["_col0","_col3"]
-            <-Map 16 [BROADCAST_EDGE] llap
+            <-Map 6 [BROADCAST_EDGE] llap
               BROADCAST [RS_73]
                 PartitionCols:_col1
                 Map Join Operator [MAPJOIN_122] (rows=27 width=7)
                   Conds:SEL_50._col0=RS_71._col0(Inner),Output:["_col0","_col1","_col3"]
-                <-Map 17 [BROADCAST_EDGE] llap
+                <-Map 15 [BROADCAST_EDGE] llap
                   BROADCAST [RS_71]
                     PartitionCols:_col0
                     Select Operator [SEL_53] (rows=25 width=7)
@@ -2090,7 +2082,7 @@ Stage-0
                     Output:["_col0","_col1"]
                     Filter Operator [FIL_112] (rows=25 width=7)
                       predicate:(key is not null and value is not null)
-                      TableScan [TS_48] (rows=25 width=7)
+                      TableScan [TS_8] (rows=25 width=7)
                         default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
             <-Select Operator [SEL_56] (rows=25 width=7)
                 Output:["_col0"]
@@ -2098,13 +2090,13 @@ Stage-0
                   predicate:value is not null
                   TableScan [TS_54] (rows=25 width=7)
                     Output:["value"]
-      <-Map 19 [CONTAINS] llap
+      <-Map 17 [CONTAINS] llap
         File Output Operator [FS_78]
           Select Operator [SEL_76] (rows=1677 width=10)
             Output:["_col0","_col1"]
             Map Join Operator [MAPJOIN_123] (rows=1677 width=10)
               Conds:RS_128._col1=SEL_59._col0(Inner),Output:["_col0","_col3"]
-            <-Map 16 [BROADCAST_EDGE] llap
+            <-Map 6 [BROADCAST_EDGE] llap
               BROADCAST [RS_128]
                 PartitionCols:_col1
                  Please refer to the previous Map Join Operator [MAPJOIN_122]
@@ -2114,13 +2106,13 @@ Stage-0
                   predicate:value is not null
                   TableScan [TS_57] (rows=500 width=10)
                     Output:["value"]
-      <-Map 20 [CONTAINS] llap
+      <-Map 18 [CONTAINS] llap
         File Output Operator [FS_78]
           Select Operator [SEL_76] (rows=1677 width=10)
             Output:["_col0","_col1"]
             Map Join Operator [MAPJOIN_123] (rows=1677 width=10)
               Conds:RS_129._col1=SEL_64._col0(Inner),Output:["_col0","_col3"]
-            <-Map 16 [BROADCAST_EDGE] llap
+            <-Map 6 [BROADCAST_EDGE] llap
               BROADCAST [RS_129]
                 PartitionCols:_col1
                  Please refer to the previous Map Join Operator [MAPJOIN_122]
@@ -2130,13 +2122,13 @@ Stage-0
                   predicate:value is not null
                   TableScan [TS_62] (rows=500 width=10)
                     Output:["value"]
-      <-Map 21 [CONTAINS] llap
+      <-Map 19 [CONTAINS] llap
         File Output Operator [FS_78]
           Select Operator [SEL_76] (rows=1677 width=10)
             Output:["_col0","_col1"]
             Map Join Operator [MAPJOIN_123] (rows=1677 width=10)
               Conds:RS_130._col1=SEL_68._col0(Inner),Output:["_col0","_col3"]
-            <-Map 16 [BROADCAST_EDGE] llap
+            <-Map 6 [BROADCAST_EDGE] llap
               BROADCAST [RS_130]
                 PartitionCols:_col1
                  Please refer to the previous Map Join Operator [MAPJOIN_122]
@@ -2146,63 +2138,6 @@ Stage-0
                   predicate:value is not null
                   TableScan [TS_66] (rows=500 width=10)
                     Output:["value"]
-      <-Reducer 11 [CONTAINS] llap
-        File Output Operator [FS_78]
-          Select Operator [SEL_45] (rows=1239 width=10)
-            Output:["_col0","_col1"]
-            Merge Join Operator [MERGEJOIN_121] (rows=1239 width=10)
-              Conds:RS_42._col1=RS_43._col0(Inner),Output:["_col1","_col4"]
-            <-Map 15 [SIMPLE_EDGE] llap
-              SHUFFLE [RS_43]
-                PartitionCols:_col0
-                Select Operator [SEL_38] (rows=500 width=10)
-                  Output:["_col0","_col1"]
-                  Filter Operator [FIL_111] (rows=500 width=10)
-                    predicate:key is not null
-                    TableScan [TS_36] (rows=500 width=10)
-                      default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-            <-Reducer 10 [SIMPLE_EDGE] llap
-              SHUFFLE [RS_42]
-                PartitionCols:_col1
-                Merge Join Operator [MERGEJOIN_120] (rows=1127 width=10)
-                  Conds:Union 9._col0=RS_40._col1(Inner),Output:["_col1"]
-                <-Map 14 [SIMPLE_EDGE] llap
-                  SHUFFLE [RS_40]
-                    PartitionCols:_col1
-                    Select Operator [SEL_35] (rows=500 width=10)
-                      Output:["_col0","_col1"]
-                      Filter Operator [FIL_110] (rows=500 width=10)
-                        predicate:(key is not null and value is not null)
-                        TableScan [TS_33] (rows=500 width=10)
-                          default@src,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                <-Union 9 [SIMPLE_EDGE]
-                  <-Map 12 [CONTAINS] llap
-                    Reduce Output Operator [RS_39]
-                      PartitionCols:_col0
-                      Select Operator [SEL_26] (rows=500 width=10)
-                        Output:["_col0"]
-                        Filter Operator [FIL_108] (rows=500 width=10)
-                          predicate:value is not null
-                          TableScan [TS_24] (rows=500 width=10)
-                            Output:["value"]
-                  <-Map 13 [CONTAINS] llap
-                    Reduce Output Operator [RS_39]
-                      PartitionCols:_col0
-                      Select Operator [SEL_31] (rows=500 width=10)
-                        Output:["_col0"]
-                        Filter Operator [FIL_109] (rows=500 width=10)
-                          predicate:value is not null
-                          TableScan [TS_29] (rows=500 width=10)
-                            Output:["value"]
-                  <-Map 8 [CONTAINS] llap
-                    Reduce Output Operator [RS_39]
-                      PartitionCols:_col0
-                      Select Operator [SEL_23] (rows=25 width=7)
-                        Output:["_col0"]
-                        Filter Operator [FIL_107] (rows=25 width=7)
-                          predicate:value is not null
-                          TableScan [TS_21] (rows=25 width=7)
-                            Output:["value"]
       <-Reducer 3 [CONTAINS] llap
         File Output Operator [FS_78]
           Select Operator [SEL_20] (rows=634 width=10)
@@ -2231,8 +2166,7 @@ Stage-0
                         Output:["_col0","_col1"]
                         Filter Operator [FIL_105] (rows=25 width=7)
                           predicate:(key is not null and value is not null)
-                          TableScan [TS_8] (rows=25 width=7)
-                            default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                           Please refer to the previous TableScan [TS_8]
                   <-Select Operator [SEL_2] (rows=25 width=7)
                       Output:["_col0"]
                       Filter Operator [FIL_103] (rows=25 width=7)
@@ -2254,6 +2188,62 @@ Stage-0
                         predicate:value is not null
                         TableScan [TS_3] (rows=500 width=10)
                           Output:["value"]
+      <-Reducer 9 [CONTAINS] llap
+        File Output Operator [FS_78]
+          Select Operator [SEL_45] (rows=1239 width=10)
+            Output:["_col0","_col1"]
+            Merge Join Operator [MERGEJOIN_121] (rows=1239 width=10)
+              Conds:RS_42._col1=RS_43._col0(Inner),Output:["_col1","_col4"]
+            <-Map 14 [SIMPLE_EDGE] llap
+              SHUFFLE [RS_43]
+                PartitionCols:_col0
+                Select Operator [SEL_38] (rows=500 width=10)
+                  Output:["_col0","_col1"]
+                  Filter Operator [FIL_111] (rows=500 width=10)
+                    predicate:key is not null
+                    TableScan [TS_36] (rows=500 width=10)
+                      default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+            <-Reducer 8 [SIMPLE_EDGE] llap
+              SHUFFLE [RS_42]
+                PartitionCols:_col1
+                Merge Join Operator [MERGEJOIN_120] (rows=1127 width=10)
+                  Conds:Union 11._col0=RS_40._col1(Inner),Output:["_col1"]
+                <-Map 7 [SIMPLE_EDGE] llap
+                  SHUFFLE [RS_40]
+                    PartitionCols:_col1
+                    Select Operator [SEL_35] (rows=500 width=10)
+                      Output:["_col0","_col1"]
+                      Filter Operator [FIL_110] (rows=500 width=10)
+                        predicate:(key is not null and value is not null)
+                         Please refer to the previous TableScan [TS_11]
+                <-Union 11 [SIMPLE_EDGE]
+                  <-Map 10 [CONTAINS] llap
+                    Reduce Output Operator [RS_39]
+                      PartitionCols:_col0
+                      Select Operator [SEL_23] (rows=25 width=7)
+                        Output:["_col0"]
+                        Filter Operator [FIL_107] (rows=25 width=7)
+                          predicate:value is not null
+                          TableScan [TS_21] (rows=25 width=7)
+                            Output:["value"]
+                  <-Map 12 [CONTAINS] llap
+                    Reduce Output Operator [RS_39]
+                      PartitionCols:_col0
+                      Select Operator [SEL_26] (rows=500 width=10)
+                        Output:["_col0"]
+                        Filter Operator [FIL_108] (rows=500 width=10)
+                          predicate:value is not null
+                          TableScan [TS_24] (rows=500 width=10)
+                            Output:["value"]
+                  <-Map 13 [CONTAINS] llap
+                    Reduce Output Operator [RS_39]
+                      PartitionCols:_col0
+                      Select Operator [SEL_31] (rows=500 width=10)
+                        Output:["_col0"]
+                        Filter Operator [FIL_109] (rows=500 width=10)
+                          predicate:value is not null
+                          TableScan [TS_29] (rows=500 width=10)
+                            Output:["value"]
 
 PREHOOK: query: explain
 SELECT x.key, y.value
@@ -2285,23 +2275,23 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 12 <- Union 13 (CONTAINS)
-Map 19 <- Union 13 (CONTAINS)
-Map 20 <- Union 15 (CONTAINS)
-Map 23 <- Map 24 (BROADCAST_EDGE)
-Map 25 <- Union 26 (CONTAINS)
-Map 32 <- Union 26 (CONTAINS)
-Map 33 <- Union 28 (CONTAINS)
-Map 34 <- Union 30 (CONTAINS)
+Map 10 <- Map 22 (BROADCAST_EDGE)
+Map 14 <- Union 15 (CONTAINS)
+Map 19 <- Union 15 (CONTAINS)
+Map 20 <- Union 17 (CONTAINS)
+Map 23 <- Union 24 (CONTAINS)
+Map 30 <- Union 24 (CONTAINS)
+Map 31 <- Union 26 (CONTAINS)
+Map 32 <- Union 28 (CONTAINS)
 Map 9 <- Union 2 (CONTAINS)
-Reducer 14 <- Union 13 (SIMPLE_EDGE), Union 15 (CONTAINS)
-Reducer 16 <- Union 15 (SIMPLE_EDGE)
-Reducer 17 <- Map 21 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 22 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 13 <- Map 21 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 16 <- Union 15 (SIMPLE_EDGE), Union 17 (CONTAINS)
+Reducer 18 <- Union 17 (SIMPLE_EDGE)
+Reducer 25 <- Union 24 (SIMPLE_EDGE), Union 26 (CONTAINS)
 Reducer 27 <- Union 26 (SIMPLE_EDGE), Union 28 (CONTAINS)
-Reducer 29 <- Union 28 (SIMPLE_EDGE), Union 30 (CONTAINS)
+Reducer 29 <- Map 10 (BROADCAST_EDGE), Union 28 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 3 <- Map 10 (BROADCAST_EDGE), Union 2 (SIMPLE_EDGE)
-Reducer 31 <- Map 23 (BROADCAST_EDGE), Union 30 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 8 <- Union 7 (SIMPLE_EDGE)
@@ -2315,7 +2305,7 @@ Stage-0
         Group By Operator [GBY_120] (rows=530 width=10)
           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
         <-Union 7 [SIMPLE_EDGE]
-          <-Reducer 31 [CONTAINS] llap
+          <-Reducer 29 [CONTAINS] llap
             Reduce Output Operator [RS_119]
               PartitionCols:_col0, _col1
               Group By Operator [GBY_118] (rows=1061 width=10)
@@ -2324,12 +2314,12 @@ Stage-0
                   Output:["_col0","_col1"]
                   Map Join Operator [MAPJOIN_167] (rows=484 width=10)
                     Conds:RS_111._col1=SEL_107._col1(Inner),Output:["_col0","_col3"]
-                  <-Map 23 [BROADCAST_EDGE] llap
+                  <-Map 10 [BROADCAST_EDGE] llap
                     BROADCAST [RS_111]
                       PartitionCols:_col1
                       Map Join Operator [MAPJOIN_166] (rows=27 width=7)
                         Conds:SEL_71._col0=RS_109._col0(Inner),Output:["_col0","_col1","_col3"]
-                      <-Map 24 [BROADCAST_EDGE] llap
+                      <-Map 22 [BROADCAST_EDGE] llap
                         BROADCAST [RS_109]
                           PartitionCols:_col0
                           Select Operator [SEL_74] (rows=25 width=7)
@@ -2342,14 +2332,14 @@ Stage-0
                           Output:["_col0","_col1"]
                           Filter Operator [FIL_156] (rows=25 width=7)
                             predicate:(key is not null and value is not null)
-                            TableScan [TS_69] (rows=25 width=7)
+                            TableScan [TS_13] (rows=25 width=7)
                               default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
                   <-Select Operator [SEL_107] (rows=440 width=10)
                       Output:["_col1"]
                       Group By Operator [GBY_106] (rows=440 width=10)
                         Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                      <-Union 30 [SIMPLE_EDGE]
-                        <-Map 34 [CONTAINS] llap
+                      <-Union 28 [SIMPLE_EDGE]
+                        <-Map 32 [CONTAINS] llap
                           Reduce Output Operator [RS_105]
                             PartitionCols:_col0, _col1
                             Group By Operator [GBY_104] (rows=881 width=10)
@@ -2360,7 +2350,7 @@ Stage-0
                                   predicate:value is not null
                                   TableScan [TS_98] (rows=500 width=10)
                                     Output:["key","value"]
-                        <-Reducer 29 [CONTAINS] llap
+                        <-Reducer 27 [CONTAINS] llap
                           Reduce Output Operator [RS_105]
                             PartitionCols:_col0, _col1
                             Group By Operator [GBY_104] (rows=881 width=10)
@@ -2369,8 +2359,8 @@ Stage-0
                                 Output:["_col0","_col1"]
                                 Group By Operator [GBY_96] (rows=381 width=10)
                                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                <-Union 28 [SIMPLE_EDGE]
-                                  <-Map 33 [CONTAINS] llap
+                                <-Union 26 [SIMPLE_EDGE]
+                                  <-Map 31 [CONTAINS] llap
                                     Reduce Output Operator [RS_95]
                                       PartitionCols:_col0, _col1
                                       Group By Operator [GBY_94] (rows=762 width=10)
@@ -2381,7 +2371,7 @@ Stage-0
                                             predicate:value is not null
                                             TableScan [TS_88] (rows=500 width=10)
                                               Output:["key","value"]
-                                  <-Reducer 27 [CONTAINS] llap
+                                  <-Reducer 25 [CONTAINS] llap
                                     Reduce Output Operator [RS_95]
                                       PartitionCols:_col0, _col1
                                       Group By Operator [GBY_94] (rows=762 width=10)
@@ -2390,8 +2380,8 @@ Stage-0
                                           Output:["_col0","_col1"]
                                           Group By Operator [GBY_86] (rows=262 width=10)
                                             Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                          <-Union 26 [SIMPLE_EDGE]
-                                            <-Map 25 [CONTAINS] llap
+                                          <-Union 24 [SIMPLE_EDGE]
+                                            <-Map 23 [CONTAINS] llap
                                               Reduce Output Operator [RS_85]
                                                 PartitionCols:_col0, _col1
                                                 Group By Operator [GBY_84] (rows=525 width=10)
@@ -2402,7 +2392,7 @@ Stage-0
                                                       predicate:value is not null
                                                       TableScan [TS_75] (rows=25 width=7)
                                                         Output:["key","value"]
-                                            <-Map 32 [CONTAINS] llap
+                                            <-Map 30 [CONTAINS] llap
                                               Reduce Output Operator [RS_85]
                                                 PartitionCols:_col0, _col1
                                                 Group By Operator [GBY_84] (rows=525 width=10)
@@ -2421,7 +2411,7 @@ Stage-0
                 Group By Operator [GBY_67] (rows=577 width=10)
                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                 <-Union 5 [SIMPLE_EDGE]
-                  <-Reducer 18 [CONTAINS] llap
+                  <-Reducer 13 [CONTAINS] llap
                     Reduce Output Operator [RS_66]
                       PartitionCols:_col0, _col1
                       Group By Operator [GBY_65] (rows=1155 width=10)
@@ -2430,7 +2420,7 @@ Stage-0
                           Output:["_col0","_col1"]
                           Merge Join Operator [MERGEJOIN_165] (rows=605 width=10)
                             Conds:RS_58._col2=RS_59._col0(Inner),Output:["_col2","_col5"]
-                          <-Map 22 [SIMPLE_EDGE] llap
+                          <-Map 21 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_59]
                               PartitionCols:_col0
                               Select Operator [SEL_54] (rows=500 width=10)
@@ -2439,28 +2429,28 @@ Stage-0
                                   predicate:key is not null
                                   TableScan [TS_52] (rows=500 width=10)
                                     default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                          <-Reducer 17 [SIMPLE_EDGE] llap
+                          <-Reducer 12 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_58]
                               PartitionCols:_col2
                               Merge Join Operator [MERGEJOIN_164] (rows=550 width=10)
                                 Conds:RS_55._col1=RS_56._col1(Inner),Output:["_col2"]
-                              <-Map 21 [SIMPLE_EDGE] llap
+                              <-Map 11 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_56]
                                   PartitionCols:_col1
                                   Select Operator [SEL_51] (rows=500 width=10)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_154] (rows=500 width=10)
                                       predicate:(key is not null and value is not null)
-                                      TableScan [TS_49] (rows=500 width=10)
-                                        default@src,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                              <-Reducer 16 [SIMPLE_EDGE] llap
+                                      TableScan [TS_16] (rows=500 width=10)
+                                        default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                              <-Reducer 18 [SIMPLE_EDGE] llap
                                 SHUFFLE [RS_55]
                                   PartitionCols:_col1
                                   Select Operator [SEL_48] (rows=381 width=10)
                                     Output:["_col1"]
                                     Group By Operator [GBY_47] (rows=381 width=10)
                                       Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                    <-Union 15 [SIMPLE_EDGE]
+                                    <-Union 17 [SIMPLE_EDGE]
                                       <-Map 20 [CONTAINS] llap
                                         Reduce Output Operator [RS_46]
                                           PartitionCols:_col0, _col1
@@ -2472,7 +2462,7 @@ Stage-0
                                                 predicate:value is not null
                                                 TableScan [TS_39] (rows=500 width=10)
                                                   Output:["key","value"]
-                                      <-Reducer 14 [CONTAINS] llap
+                                      <-Reducer 16 [CONTAINS] llap
                                         Reduce Output Operator [RS_46]
                                           PartitionCols:_col0, _col1
                                           Group By Operator [GBY_45] (rows=762 width=10)
@@ -2481,8 +2471,8 @@ Stage-0
                                               Output:["_col0","_col1"]
                                               Group By Operator [GBY_37] (rows=262 width=10)
                                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                              <-Union 13 [SIMPLE_EDGE]
-                                                <-Map 12 [CONTAINS] llap
+                                              <-Union 15 [SIMPLE_EDGE]
+                                                <-Map 14 [CONTAINS] llap
                                                   Reduce Output Operator [RS_36]
                                                     PartitionCols:_col0, _col1
                                                     Group By Operator [GBY_35] (rows=525 width=10)
@@ -2520,8 +2510,7 @@ Stage-0
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_150] (rows=500 width=10)
                                   predicate:key is not null
-                                  TableScan [TS_16] (rows=500 width=10)
-                                    default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                                   Please refer to the previous TableScan [TS_16]
                           <-Reducer 3 [SIMPLE_EDGE] llap
                             SHUFFLE [RS_22]
                               PartitionCols:_col2
@@ -2534,8 +2523,7 @@ Stage-0
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_149] (rows=25 width=7)
                                       predicate:(key is not null and value is not null)
-                                      TableScan [TS_13] (rows=25 width=7)
-                                        default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                                       Please refer to the previous TableScan [TS_13]
                               <-Select Operator [SEL_12] (rows=262 width=10)
                                   Output:["_col1"]
                                   Group By Operator [GBY_11] (rows=262 width=10)
@@ -2630,18 +2618,18 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Map 6 (BROADCAST_EDGE), Union 2 (CONTAINS)
-Map 12 <- Union 9 (CONTAINS)
-Map 13 <- Union 9 (CONTAINS)
-Map 16 <- Map 17 (BROADCAST_EDGE)
-Map 18 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 19 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 20 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
-Map 21 <- Map 16 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 10 <- Union 11 (CONTAINS)
+Map 12 <- Union 11 (CONTAINS)
+Map 13 <- Union 11 (CONTAINS)
+Map 16 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 17 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 18 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
+Map 19 <- Map 6 (BROADCAST_EDGE), Union 4 (CONTAINS)
 Map 5 <- Map 6 (BROADCAST_EDGE), Union 2 (CONTAINS)
-Map 8 <- Union 9 (CONTAINS)
-Reducer 10 <- Map 14 (SIMPLE_EDGE), Union 9 (SIMPLE_EDGE)
-Reducer 11 <- Map 15 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Map 6 <- Map 15 (BROADCAST_EDGE)
 Reducer 3 <- Map 7 (SIMPLE_EDGE), Union 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
+Reducer 8 <- Map 7 (SIMPLE_EDGE), Union 11 (SIMPLE_EDGE)
+Reducer 9 <- Map 14 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE), Union 4 (CONTAINS)
 
 Stage-5
   Stats-Aggr Operator
@@ -2652,19 +2640,19 @@ Stage-5
           Dependency Collection{}
             Stage-3
               Union 4
-              <-Map 18 [CONTAINS] llap
+              <-Map 16 [CONTAINS] llap
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
                   Select Operator [SEL_76] (rows=1677 width=10)
                     Output:["_col0","_col1"]
                     Map Join Operator [MAPJOIN_128] (rows=1677 width=10)
                       Conds:RS_73._col1=SEL_56._col0(Inner),Output:["_col0","_col3"]
-                    <-Map 16 [BROADCAST_EDGE] llap
+                    <-Map 6 [BROADCAST_EDGE] llap
                       BROADCAST [RS_73]
                         PartitionCols:_col1
                         Map Join Operator [MAPJOIN_127] (rows=27 width=7)
                           Conds:SEL_50._col0=RS_71._col0(Inner),Output:["_col0","_col1","_col3"]
-                        <-Map 17 [BROADCAST_EDGE] llap
+                        <-Map 15 [BROADCAST_EDGE] llap
                           BROADCAST [RS_71]
                             PartitionCols:_col0
                             Select Operator [SEL_53] (rows=25 width=7)
@@ -2677,7 +2665,7 @@ Stage-5
                             Output:["_col0","_col1"]
                             Filter Operator [FIL_117] (rows=25 width=7)
                               predicate:(key is not null and value is not null)
-                              TableScan [TS_48] (rows=25 width=7)
+                              TableScan [TS_8] (rows=25 width=7)
                                 default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
                     <-Select Operator [SEL_56] (rows=25 width=7)
                         Output:["_col0"]
@@ -2691,14 +2679,14 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_76]
-              <-Map 19 [CONTAINS] llap
+              <-Map 17 [CONTAINS] llap
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
                   Select Operator [SEL_76] (rows=1677 width=10)
                     Output:["_col0","_col1"]
                     Map Join Operator [MAPJOIN_128] (rows=1677 width=10)
                       Conds:RS_133._col1=SEL_59._col0(Inner),Output:["_col0","_col3"]
-                    <-Map 16 [BROADCAST_EDGE] llap
+                    <-Map 6 [BROADCAST_EDGE] llap
                       BROADCAST [RS_133]
                         PartitionCols:_col1
                          Please refer to the previous Map Join Operator [MAPJOIN_127]
@@ -2714,14 +2702,14 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_76]
-              <-Map 20 [CONTAINS] llap
+              <-Map 18 [CONTAINS] llap
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
                   Select Operator [SEL_76] (rows=1677 width=10)
                     Output:["_col0","_col1"]
                     Map Join Operator [MAPJOIN_128] (rows=1677 width=10)
                       Conds:RS_134._col1=SEL_64._col0(Inner),Output:["_col0","_col3"]
-                    <-Map 16 [BROADCAST_EDGE] llap
+                    <-Map 6 [BROADCAST_EDGE] llap
                       BROADCAST [RS_134]
                         PartitionCols:_col1
                          Please refer to the previous Map Join Operator [MAPJOIN_127]
@@ -2737,14 +2725,14 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_76]
-              <-Map 21 [CONTAINS] llap
+              <-Map 19 [CONTAINS] llap
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
                   Select Operator [SEL_76] (rows=1677 width=10)
                     Output:["_col0","_col1"]
                     Map Join Operator [MAPJOIN_128] (rows=1677 width=10)
                       Conds:RS_135._col1=SEL_68._col0(Inner),Output:["_col0","_col3"]
-                    <-Map 16 [BROADCAST_EDGE] llap
+                    <-Map 6 [BROADCAST_EDGE] llap
                       BROADCAST [RS_135]
                         PartitionCols:_col1
                          Please refer to the previous Map Join Operator [MAPJOIN_127]
@@ -2760,70 +2748,6 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_76]
-              <-Reducer 11 [CONTAINS] llap
-                File Output Operator [FS_79]
-                  table:{"name:":"default.a"}
-                  Select Operator [SEL_45] (rows=1239 width=10)
-                    Output:["_col0","_col1"]
-                    Merge Join Operator [MERGEJOIN_126] (rows=1239 width=10)
-                      Conds:RS_42._col1=RS_43._col0(Inner),Output:["_col1","_col4"]
-                    <-Map 15 [SIMPLE_EDGE] llap
-                      SHUFFLE [RS_43]
-                        PartitionCols:_col0
-                        Select Operator [SEL_38] (rows=500 width=10)
-                          Output:["_col0","_col1"]
-                          Filter Operator [FIL_116] (rows=500 width=10)
-                            predicate:key is not null
-                            TableScan [TS_36] (rows=500 width=10)
-                              default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                    <-Reducer 10 [SIMPLE_EDGE] llap
-                      SHUFFLE [RS_42]
-                        PartitionCols:_col1
-                        Merge Join Operator [MERGEJOIN_125] (rows=1127 width=10)
-                          Conds:Union 9._col0=RS_40._col1(Inner),Output:["_col1"]
-                        <-Map 14 [SIMPLE_EDGE] llap
-                          SHUFFLE [RS_40]
-                            PartitionCols:_col1
-                            Select Operator [SEL_35] (rows=500 width=10)
-                              Output:["_col0","_col1"]
-                              Filter Operator [FIL_115] (rows=500 width=10)
-                                predicate:(key is not null and value is not null)
-                                TableScan [TS_33] (rows=500 width=10)
-                                  default@src,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                        <-Union 9 [SIMPLE_EDGE]
-                          <-Map 12 [CONTAINS] llap
-                            Reduce Output Operator [RS_39]
-                              PartitionCols:_col0
-                              Select Operator [SEL_26] (rows=500 width=10)
-                                Output:["_col0"]
-                                Filter Operator [FIL_113] (rows=500 width=10)
-                                  predicate:value is not null
-                                  TableScan [TS_24] (rows=500 width=10)
-                                    Output:["value"]
-                          <-Map 13 [CONTAINS] llap
-                            Reduce Output Operator [RS_39]
-                              PartitionCols:_col0
-                              Select Operator [SEL_31] (rows=500 width=10)
-                                Output:["_col0"]
-                                Filter Operator [FIL_114] (rows=500 width=10)
-                                  predicate:value is not null
-                                  TableScan [TS_29] (rows=500 width=10)
-                                    Output:["value"]
-                          <-Map 8 [CONTAINS] llap
-                            Reduce Output Operator [RS_39]
-                              PartitionCols:_col0
-                              Select Operator [SEL_23] (rows=25 width=7)
-                                Output:["_col0"]
-                                Filter Operator [FIL_112] (rows=25 width=7)
-                                  predicate:value is not null
-                                  TableScan [TS_21] (rows=25 width=7)
-                                    Output:["value"]
-                File Output Operator [FS_81]
-                  table:{"name:":"default.b"}
-                   Please refer to the previous Select Operator [SEL_45]
-                File Output Operator [FS_83]
-                  table:{"name:":"default.c"}
-                   Please refer to the previous Select Operator [SEL_45]
               <-Reducer 3 [CONTAINS] llap
                 File Output Operator [FS_79]
                   table:{"name:":"default.a"}
@@ -2853,8 +2777,7 @@ Stage-5
                                 Output:["_col0","_col1"]
                                 Filter Operator [FIL_110] (rows=25 width=7)
                                   predicate:(key is not null and value is not null)
-                                  TableScan [TS_8] (rows=25 width=7)
-                                    default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                                   Please refer to the previous TableScan [TS_8]
                           <-Select Operator [SEL_2] (rows=25 width=7)
                               Output:["_col0"]
                               Filter Operator [FIL_108] (rows=25 width=7)
@@ -2882,6 +2805,69 @@ Stage-5
                 File Output Operator [FS_83]
                   table:{"name:":"default.c"}
                    Please refer to the previous Select Operator [SEL_20]
+              <-Reducer 9 [CONTAINS] llap
+                File Output Operator [FS_79]
+                  table:{"name:":"default.a"}
+                  Select Operator [SEL_45] (rows=1239 width=10)
+                    Output:["_col0","_col1"]
+                    Merge Join Operator [MERGEJOIN_126] (rows=1239 width=10)
+                      Conds:RS_42._col1=RS_43._col0(Inner),Output:["_col1","_col4"]
+                    <-Map 14 [SIMPLE_EDGE] llap
+                      SHUFFLE [RS_43]
+                        PartitionCols:_col0
+                        Select Operator [SEL_38] (rows=500 width=10)
+                          Output:["_col0","_col1"]
+                          Filter Operator [FIL_116] (rows=500 width=10)
+                            predicate:key is not null
+                            TableScan [TS_36] (rows=500 width=10)
+                              default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
+                    <-Reducer 8 [SIMPLE_EDGE] llap
+                      SHUFFLE [RS_42]
+                        PartitionCols:_col1
+                        Merge Join Operator [MERGEJOIN_125] (rows=1127 width=10)
+                          Conds:Union 11._col0=RS_40._col1(Inner),Output:["_col1"]
+                        <-Map 7 [SIMPLE_EDGE] llap
+                          SHUFFLE [RS_40]
+                            PartitionCols:_col1
+                            Select Operator [SEL_35] (rows=500 width=10)
+                              Output:["_col0","_col1"]
+                              Filter Operator [FIL_115] (rows=500 width=10)
+                                predicate:(key is not null and value is not null)
+                                 Please refer to the previous TableScan [TS_11]
+                        <-Union 11 [SIMPLE_EDGE]
+                          <-Map 10 [CONTAINS] llap
+                            Reduce Output Operator [RS_39]
+                              PartitionCols:_col0
+                              Select Operator [SEL_23] (rows=25 width=7)
+                                Output:["_col0"]
+                                Filter Operator [FIL_112] (rows=25 width=7)
+                                  predicate:value is not null
+                                  TableScan [TS_21] (rows=25 width=7)
+                                    Output:["value"]
+                          <-Map 12 [CONTAINS] llap
+                            Reduce Output Operator [RS_39]
+                              PartitionCols:_col0
+                              Select Operator [SEL_26] (rows=500 width=10)
+                                Output:["_col0"]
+                                Filter Operator [FIL_113] (rows=500 width=10)
+                                  predicate:value is not null
+                                  TableScan [TS_24] (rows=500 width=10)
+                                    Output:["value"]
+                          <-Map 13 [CONTAINS] llap
+                            Reduce Output Operator [RS_39]
+                              PartitionCols:_col0
+                              Select Operator [SEL_31] (rows=500 width=10)
+                                Output:["_col0"]
+                                Filter Operator [FIL_114] (rows=500 width=10)
+                                  predicate:value is not null
+                                  TableScan [TS_29] (rows=500 width=10)
+                                    Output:["value"]
+                File Output Operator [FS_81]
+                  table:{"name:":"default.b"}
+                   Please refer to the previous Select Operator [SEL_45]
+                File Output Operator [FS_83]
+                  table:{"name:":"default.c"}
+                   Please refer to the previous Select Operator [SEL_45]
 Stage-6
   Stats-Aggr Operator
     Stage-1
@@ -2931,23 +2917,23 @@ Plan optimized by CBO.
 
 Vertex dependency in root stage
 Map 1 <- Union 2 (CONTAINS)
-Map 12 <- Union 13 (CONTAINS)
-Map 19 <- Union 13 (CONTAINS)
-Map 20 <- Union 15 (CONTAINS)
-Map 23 <- Map 24 (BROADCAST_EDGE)
-Map 25 <- Union 26 (CONTAINS)
-Map 32 <- Union 26 (CONTAINS)
-Map 33 <- Union 28 (CONTAINS)
-Map 34 <- Union 30 (CONTAINS)
+Map 10 <- Map 22 (BROADCAST_EDGE)
+Map 14 <- Union 15 (CONTAINS)
+Map 19 <- Union 15 (CONTAINS)
+Map 20 <- Union 17 (CONTAINS)
+Map 23 <- Union 24 (CONTAINS)
+Map 30 <- Union 24 (CONTAINS)
+Map 31 <- Union 26 (CONTAINS)
+Map 32 <- Union 28 (CONTAINS)
 Map 9 <- Union 2 (CONTAINS)
-Reducer 14 <- Union 13 (SIMPLE_EDGE), Union 15 (CONTAINS)
-Reducer 16 <- Union 15 (SIMPLE_EDGE)
-Reducer 17 <- Map 21 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 22 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 12 <- Map 11 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 13 <- Map 21 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 16 <- Union 15 (SIMPLE_EDGE), Union 17 (CONTAINS)
+Reducer 18 <- Union 17 (SIMPLE_EDGE)
+Reducer 25 <- Union 24 (SIMPLE_EDGE), Union 26 (CONTAINS)
 Reducer 27 <- Union 26 (SIMPLE_EDGE), Union 28 (CONTAINS)
-Reducer 29 <- Union 28 (SIMPLE_EDGE), Union 30 (CONTAINS)
+Reducer 29 <- Map 10 (BROADCAST_EDGE), Union 28 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 3 <- Map 10 (BROADCAST_EDGE), Union 2 (SIMPLE_EDGE)
-Reducer 31 <- Map 23 (BROADCAST_EDGE), Union 30 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 8 <- Union 7 (SIMPLE_EDGE)
@@ -2966,7 +2952,7 @@ Stage-5
                 Group By Operator [GBY_120] (rows=530 width=10)
                   Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                 <-Union 7 [SIMPLE_EDGE]
-                  <-Reducer 31 [CONTAINS] llap
+                  <-Reducer 29 [CONTAINS] llap
                     Reduce Output Operator [RS_119]
                       PartitionCols:_col0, _col1
                       Group By Operator [GBY_118] (rows=1061 width=10)
@@ -2975,12 +2961,12 @@ Stage-5
                           Output:["_col0","_col1"]
                           Map Join Operator [MAPJOIN_172] (rows=484 width=10)
                             Conds:RS_111._col1=SEL_107._col1(Inner),Output:["_col0","_col3"]
-                          <-Map 23 [BROADCAST_EDGE] llap
+                          <-Map 10 [BROADCAST_EDGE] llap
                             BROADCAST [RS_111]
                               PartitionCols:_col1
                               Map Join Operator [MAPJOIN_171] (rows=27 width=7)
                                 Conds:SEL_71._col0=RS_109._col0(Inner),Output:["_col0","_col1","_col3"]
-                              <-Map 24 [BROADCAST_EDGE] llap
+                              <-Map 22 [BROADCAST_EDGE] llap
                                 BROADCAST [RS_109]
                                   PartitionCols:_col0
                                   Select Operator [SEL_74] (rows=25 width=7)
@@ -2993,14 +2979,14 @@ Stage-5
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_161] (rows=25 width=7)
                                     predicate:(key is not null and value is not null)
-                                    TableScan [TS_69] (rows=25 width=7)
+                                    TableScan [TS_13] (rows=25 width=7)
                                       default@src1,x,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
                           <-Select Operator [SEL_107] (rows=440 width=10)
                               Output:["_col1"]
                               Group By Operator [GBY_106] (rows=440 width=10)
                                 Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                              <-Union 30 [SIMPLE_EDGE]
-                                <-Map 34 [CONTAINS] llap
+                              <-Union 28 [SIMPLE_EDGE]
+                                <-Map 32 [CONTAINS] llap
                                   Reduce Output Operator [RS_105]
                                     PartitionCols:_col0, _col1
                                     Group By Operator [GBY_104] (rows=881 width=10)
@@ -3011,7 +2997,7 @@ Stage-5
                                           predicate:value is not null
                                           TableScan [TS_98] (rows=500 width=10)
                                             Output:["key","value"]
-                                <-Reducer 29 [CONTAINS] llap
+                                <-Reducer 27 [CONTAINS] llap
                                   Reduce Output Operator [RS_105]
                                     PartitionCols:_col0, _col1
                                     Group By Operator [GBY_104] (rows=881 width=10)
@@ -3020,8 +3006,8 @@ Stage-5
                                         Output:["_col0","_col1"]
                                         Group By Operator [GBY_96] (rows=381 width=10)
                                           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                        <-Union 28 [SIMPLE_EDGE]
-                                          <-Map 33 [CONTAINS] llap
+                                        <-Union 26 [SIMPLE_EDGE]
+                                          <-Map 31 [CONTAINS] llap
                                             Reduce Output Operator [RS_95]
                                               PartitionCols:_col0, _col1
                                               Group By Operator [GBY_94] (rows=762 width=10)
@@ -3032,7 +3018,7 @@ Stage-5
                                                     predicate:value is not null
                                                     TableScan [TS_88] (rows=500 width=10)
                                                       Output:["key","value"]
-                                          <-Reducer 27 [CONTAINS] llap
+                                          <-Reducer 25 [CONTAINS] llap
                                             Reduce Output Operator [RS_95]
                                               PartitionCols:_col0, _col1
                                               Group By Operator [GBY_94] (rows=762 width=10)
@@ -3041,8 +3027,8 @@ Stage-5
                                                   Output:["_col0","_col1"]
                                                   Group By Operator [GBY_86] (rows=262 width=10)
                                                     Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
-                                                  <-Union 26 [SIMPLE_EDGE]
-                                                    <-Map 25 [CONTAINS] llap
+                                                  <-Union 24 [SIMPLE_EDGE]
+                                                    <-Map 23 [CONTAINS] llap
                                                       Reduce Output Operator [RS_85]
                                                         PartitionCols:_col0, _col1
                                                         Group By Operator [GBY_84] (rows=525 width=10)
@@ -3053,7 +3039,7 @@ Stage-5
                                                               predicate:value is not null
                                                               TableScan [TS_75] (rows=25 width=7)
                                                                 Output:["key","value"]
-                                                    <-Map 32 [CONTAINS] llap
+                                                    <-Map 30 [CONTAINS] llap
                                                       Reduce Output Operator [RS_85]
                                                         PartitionCols:_col0, _col1
                                                         Group By Operator [GBY_84] (rows=525 width=10)
@@ -3072,7 +3058,7 @@ Stage-5
                         Group By Operator [GBY_67] (rows=577 width=10)
                           Output:["_col0","_col1"],keys:KEY._col0, KEY._col1
                         <-Union 5 [SIMPLE_EDGE]
-                          <-Reducer 18 [CONTAINS] llap
+                          <-Reducer 13 [CONTAINS] llap
                             Reduce Output Operator [RS_66]
                               PartitionCols:_col0, _col1
                               Group By Operator [GBY_65] (rows=1155 width=10)
@@ -3081,7 +3067,7 @@ Stage-5
                                   Output:["_col0","_col1"]
                                   Merge Join Operator [MERGEJOIN_170] (rows=605 width=10)
                                     Conds:RS_58._col2=RS_59._col0(Inner),Output:["_col2","_col5"]
-                                  <-Map 22 [SIMPLE_EDGE] llap
+                                  <-Map 21 [SIMPLE_EDGE] llap
                                     SHUFFLE [RS_59]
                                       PartitionCols:_col0
                                       Select Operator [SEL_54] (rows=500 width=10)
@@ -3090,28 +3076,28 @@ Stage-5
                                           predicate:key is not null
                                           TableScan [TS_52] (rows=500 width=10)
                                             default@src,y,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
-                                  <-Reducer 17 [SIMPLE_EDGE] llap
+                                  <-Reducer 12 [SIMPLE_EDGE] llap
                                     SHUFFLE [RS_58]
        

<TRUNCATED>

[19/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query1.q.out b/ql/src/test/results/clientpositive/perf/query1.q.out
index d84c76d..7c583e4 100644
--- a/ql/src/test/results/clientpositive/perf/query1.q.out
+++ b/ql/src/test/results/clientpositive/perf/query1.q.out
@@ -47,14 +47,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 14 (SIMPLE_EDGE)
-Reducer 13 <- Reducer 12 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 10 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 9 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 10 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-Reducer 6 <- Reducer 13 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 4 <- Map 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 5 <- Map 12 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 6 <- Reducer 5 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Map 10 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -74,48 +74,12 @@ Stage-0
                   predicate:(_col2 > CASE WHEN (_col8 is null) THEN (null) ELSE (_col7) END)
                   Merge Join Operator [MERGEJOIN_78] (rows=96800003 width=860)
                     Conds:RS_45._col1=RS_46._col2(Left Outer),Output:["_col2","_col6","_col7","_col8"]
-                  <-Reducer 13 [SIMPLE_EDGE]
-                    SHUFFLE [RS_46]
-                      PartitionCols:_col2
-                      Select Operator [SEL_38] (rows=15837566 width=77)
-                        Output:["_col0","_col1","_col2"]
-                        Group By Operator [GBY_37] (rows=15837566 width=77)
-                          Output:["_col0","_col1"],aggregations:["avg(_col2)"],keys:_col1
-                          Select Operator [SEL_33] (rows=31675133 width=77)
-                            Output:["_col1","_col2"]
-                            Group By Operator [GBY_32] (rows=31675133 width=77)
-                              Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                            <-Reducer 12 [SIMPLE_EDGE]
-                              SHUFFLE [RS_31]
-                                PartitionCols:_col0
-                                Group By Operator [GBY_30] (rows=63350266 width=77)
-                                  Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col2, _col1
-                                  Merge Join Operator [MERGEJOIN_77] (rows=63350266 width=77)
-                                    Conds:RS_26._col0=RS_27._col0(Inner),Output:["_col1","_col2","_col3"]
-                                  <-Map 11 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_26]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_22] (rows=57591150 width=77)
-                                        Output:["_col0","_col1","_col2","_col3"]
-                                        Filter Operator [FIL_72] (rows=57591150 width=77)
-                                          predicate:(sr_returned_date_sk is not null and sr_store_sk is not null)
-                                          TableScan [TS_20] (rows=57591150 width=77)
-                                            default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_customer_sk","sr_store_sk","sr_fee"]
-                                  <-Map 14 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_27]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_25] (rows=36524 width=1119)
-                                        Output:["_col0"]
-                                        Filter Operator [FIL_73] (rows=36524 width=1119)
-                                          predicate:((d_year = 2000) and d_date_sk is not null)
-                                          TableScan [TS_23] (rows=73049 width=1119)
-                                            default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
                   <-Reducer 5 [SIMPLE_EDGE]
                     SHUFFLE [RS_45]
                       PartitionCols:_col1
                       Merge Join Operator [MERGEJOIN_76] (rows=88000001 width=860)
                         Conds:RS_42._col0=RS_43._col0(Inner),Output:["_col1","_col2","_col6"]
-                      <-Map 10 [SIMPLE_EDGE]
+                      <-Map 12 [SIMPLE_EDGE]
                         SHUFFLE [RS_43]
                           PartitionCols:_col0
                           Select Operator [SEL_19] (rows=80000000 width=860)
@@ -129,7 +93,7 @@ Stage-0
                           PartitionCols:_col0
                           Merge Join Operator [MERGEJOIN_75] (rows=34842647 width=77)
                             Conds:RS_39._col1=RS_40._col0(Inner),Output:["_col0","_col1","_col2"]
-                          <-Map 9 [SIMPLE_EDGE]
+                          <-Map 11 [SIMPLE_EDGE]
                             SHUFFLE [RS_40]
                               PartitionCols:_col0
                               Select Operator [SEL_16] (rows=852 width=1910)
@@ -161,7 +125,7 @@ Stage-0
                                               predicate:(sr_returned_date_sk is not null and sr_store_sk is not null and sr_customer_sk is not null)
                                               TableScan [TS_0] (rows=57591150 width=77)
                                                 default@store_returns,store_returns,Tbl:COMPLETE,Col:NONE,Output:["sr_returned_date_sk","sr_customer_sk","sr_store_sk","sr_fee"]
-                                      <-Map 8 [SIMPLE_EDGE]
+                                      <-Map 10 [SIMPLE_EDGE]
                                         SHUFFLE [RS_7]
                                           PartitionCols:_col0
                                           Select Operator [SEL_5] (rows=36524 width=1119)
@@ -170,4 +134,38 @@ Stage-0
                                               predicate:((d_year = 2000) and d_date_sk is not null)
                                               TableScan [TS_3] (rows=73049 width=1119)
                                                 default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year"]
+                  <-Reducer 9 [SIMPLE_EDGE]
+                    SHUFFLE [RS_46]
+                      PartitionCols:_col2
+                      Select Operator [SEL_38] (rows=15837566 width=77)
+                        Output:["_col0","_col1","_col2"]
+                        Group By Operator [GBY_37] (rows=15837566 width=77)
+                          Output:["_col0","_col1"],aggregations:["avg(_col2)"],keys:_col1
+                          Select Operator [SEL_33] (rows=31675133 width=77)
+                            Output:["_col1","_col2"]
+                            Group By Operator [GBY_32] (rows=31675133 width=77)
+                              Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
+                            <-Reducer 8 [SIMPLE_EDGE]
+                              SHUFFLE [RS_31]
+                                PartitionCols:_col0
+                                Group By Operator [GBY_30] (rows=63350266 width=77)
+                                  Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col2, _col1
+                                  Merge Join Operator [MERGEJOIN_77] (rows=63350266 width=77)
+                                    Conds:RS_26._col0=RS_27._col0(Inner),Output:["_col1","_col2","_col3"]
+                                  <-Map 1 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_26]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_22] (rows=57591150 width=77)
+                                        Output:["_col0","_col1","_col2","_col3"]
+                                        Filter Operator [FIL_72] (rows=57591150 width=77)
+                                          predicate:(sr_returned_date_sk is not null and sr_store_sk is not null)
+                                           Please refer to the previous TableScan [TS_0]
+                                  <-Map 10 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_27]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_25] (rows=36524 width=1119)
+                                        Output:["_col0"]
+                                        Filter Operator [FIL_73] (rows=36524 width=1119)
+                                          predicate:((d_year = 2000) and d_date_sk is not null)
+                                           Please refer to the previous TableScan [TS_3]
 


[35/50] [abbrv] hive git commit: HIVE-16645: Commands.java has missed the catch statement and has some code format errors (Saijin Huang, reviewed by Aihua Xu & Peter Vary)

Posted by we...@apache.org.
HIVE-16645: Commands.java has missed the catch statement and has some code format errors (Saijin Huang, reviewed by Aihua Xu & Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/455ffdd9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/455ffdd9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/455ffdd9

Branch: refs/heads/hive-14535
Commit: 455ffdd9125bdfe73b2c7f7ddebaeff138b77f53
Parents: 77f44b6
Author: Aihua Xu <ai...@apache.org>
Authored: Mon May 15 11:09:08 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon May 15 11:36:07 2017 -0400

----------------------------------------------------------------------
 .../src/java/org/apache/hive/beeline/Commands.java  | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/455ffdd9/beeline/src/java/org/apache/hive/beeline/Commands.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java
index da896a7..bcbc574 100644
--- a/beeline/src/java/org/apache/hive/beeline/Commands.java
+++ b/beeline/src/java/org/apache/hive/beeline/Commands.java
@@ -304,6 +304,7 @@ public class Commands {
         try {
           rs.close();
         } catch (Exception e) {
+          beeLine.error(e);
         }
       }
       // run as a batch
@@ -787,6 +788,7 @@ public class Commands {
   private BufferedRows getConfInternal(boolean call) {
     Statement stmnt = null;
     BufferedRows rows = null;
+    ResultSet rs = null;
     try {
       boolean hasResults = false;
       DatabaseConnection dbconn = beeLine.getDatabaseConnection();
@@ -803,19 +805,26 @@ public class Commands {
         }
       }
       if (hasResults) {
-        ResultSet rs = stmnt.getResultSet();
+        rs = stmnt.getResultSet();
         rows = new BufferedRows(beeLine, rs);
       }
     } catch (SQLException e) {
       beeLine.error(e);
     } finally {
-      if (stmnt != null) {
+      if (rs != null) {
         try {
-          stmnt.close();
+          rs.close();
         } catch (SQLException e1) {
           beeLine.error(e1);
         }
       }
+      if (stmnt != null) {
+        try {
+          stmnt.close();
+        } catch (SQLException e2) {
+          beeLine.error(e2);
+        }
+      }
     }
     return rows;
   }
@@ -1428,7 +1437,6 @@ public class Commands {
   public boolean closeall(String line) {
     if (close(null)) {
       while (close(null)) {
-        ;
       }
       return true;
     }
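
For context on the getConfInternal hunk above: the fix closes the ResultSet before the Statement and reports close failures through beeLine.error instead of swallowing them. As a point of comparison only, here is a minimal sketch of the same cleanup expressed with try-with-resources over plain JDBC; the class and method names are illustrative and do not appear in Commands.java:

  import java.sql.Connection;
  import java.sql.ResultSet;
  import java.sql.SQLException;
  import java.sql.Statement;

  public final class ResourceCleanupSketch {
    // Runs a query and returns the first column of the first row, or null.
    // try-with-resources closes rs first and stmt second, even if the body
    // throws, which is what the explicit finally block above does by hand.
    static String firstValue(Connection conn, String sql) throws SQLException {
      try (Statement stmt = conn.createStatement();
           ResultSet rs = stmt.executeQuery(sql)) {
        return rs.next() ? rs.getString(1) : null;
      }
    }
  }

The patch keeps the explicit finally form, presumably so that each close failure is routed through BeeLine's own error reporting rather than suppressed.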


[44/50] [abbrv] hive git commit: HIVE-16619: Clean up javadoc from errors in module hive-serde (Janos Gub via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-16619: Clean up javadoc from errors in module hive-serde (Janos Gub via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/23e703f9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/23e703f9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/23e703f9

Branch: refs/heads/hive-14535
Commit: 23e703f96935ecd6c7daf6316329f305ccac6615
Parents: 202c513
Author: Janos Gub <gu...@gmail.com>
Authored: Tue May 16 08:30:58 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:30:58 2017 +0200

----------------------------------------------------------------------
 .../hadoop/hive/serde2/AbstractSerDe.java       |  2 +-
 .../hive/serde2/ColumnProjectionUtils.java      |  8 ++++----
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |  2 +-
 .../serde2/avro/AvroLazyObjectInspector.java    |  2 +-
 .../hive/serde2/avro/AvroSchemaRetriever.java   |  2 +-
 .../hive/serde2/columnar/ColumnarSerDe.java     |  4 ++--
 .../serde2/columnar/ColumnarStructBase.java     |  2 +-
 .../dynamic_type/DynamicSerDeStructBase.java    |  2 +-
 .../serde2/dynamic_type/ParseException.java     |  2 +-
 .../hive/serde2/fast/DeserializeRead.java       |  6 +++---
 .../hive/serde2/io/TimestampWritable.java       |  6 +++---
 .../hadoop/hive/serde2/lazy/LazyDate.java       |  2 +-
 .../hadoop/hive/serde2/lazy/LazyFactory.java    | 21 ++++++++------------
 .../hive/serde2/lazy/LazyHiveDecimal.java       |  2 +-
 .../hive/serde2/lazy/LazySimpleSerDe.java       | 12 +++++------
 .../hive/serde2/lazybinary/LazyBinaryArray.java |  2 +-
 .../hive/serde2/lazybinary/LazyBinaryMap.java   |  2 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |  6 +++---
 .../serde2/lazybinary/LazyBinaryString.java     |  2 +-
 .../serde2/lazybinary/LazyBinaryStruct.java     |  2 +-
 .../hive/serde2/lazybinary/LazyBinaryUnion.java |  2 +-
 .../hive/serde2/lazybinary/LazyBinaryUtils.java |  2 +-
 .../serde2/objectinspector/ObjectInspector.java |  2 +-
 .../objectinspector/ObjectInspectorUtils.java   |  4 ++--
 .../PrimitiveObjectInspectorFactory.java        |  6 +++---
 .../hive/serde2/typeinfo/TypeInfoUtils.java     |  4 ++--
 26 files changed, 52 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
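
The hunks that follow do two things: they replace raw angle brackets in javadoc with HTML entities so the javadoc tool no longer sees malformed tags, and they make @param tags name the actual method parameters. A small illustrative class showing the escaped form (not taken from the patch):

  /** Illustrative only; not part of the hive-serde patch. */
  public final class JavadocEscapeSketch {

    /**
     * Builds the display name of a Hive map type, e.g. map&lt;int,string&gt;.
     * The angle brackets are written as HTML entities so the javadoc tool
     * does not try to parse them as HTML tags.
     *
     * @param keyType   the key type name, such as int
     * @param valueType the value type name, such as string
     * @return the formatted type string
     */
    public static String mapTypeName(String keyType, String valueType) {
      return "map<" + keyType + "," + valueType + ">";
    }
  }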


http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
index 049b35d..a2a85b3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
@@ -118,7 +118,7 @@ public abstract class AbstractSerDe implements Deserializer, Serializer {
   }
 
   /**
-   * @rturn Whether the SerDe that can store schema both inside and outside of metastore
+   * @return Whether the SerDe that can store schema both inside and outside of metastore
    *        does, in fact, store it inside metastore, based on table parameters.
    */
   public boolean shouldStoreFieldsInMetastore(Map<String, String> tableParams) {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
index 9844166..2009645 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
@@ -44,7 +44,7 @@ public final class ColumnProjectionUtils {
   /**
    * the nested column path is the string from the root to the leaf
    * e.g.
-   * c:struct<a:string,b:string>
+   * c:struct&lt;a:string,b:string&gt;
    * the column a's path is c.a and b's path is c.b
    */
   public static final String READ_NESTED_COLUMN_PATH_CONF_STR =
@@ -57,7 +57,7 @@ public final class ColumnProjectionUtils {
   private static final Joiner CSV_JOINER = Joiner.on(",").skipNulls();
 
   /**
-   * @deprecated for backwards compatibility with <= 0.12, use setReadAllColumns
+   * @deprecated for backwards compatibility with &lt;= 0.12, use setReadAllColumns
    */
   @Deprecated
   public static void setFullyReadColumns(Configuration conf) {
@@ -65,7 +65,7 @@ public final class ColumnProjectionUtils {
   }
 
   /**
-   * @deprecated for backwards compatibility with <= 0.12, use setReadAllColumns
+   * @deprecated for backwards compatibility with &lt;= 0.12, use setReadAllColumns
    * and appendReadColumns
    */
   @Deprecated
@@ -76,7 +76,7 @@ public final class ColumnProjectionUtils {
   }
 
   /**
-   * @deprecated for backwards compatibility with <= 0.12, use appendReadColumns
+   * @deprecated for backwards compatibility with &lt;= 0.12, use appendReadColumns
    */
   @Deprecated
   public static void appendReadColumnIDs(Configuration conf, List<Integer> ids) {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index 9ead0ed..b9d23dd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -163,7 +163,7 @@ public final class SerDeUtils {
    * Convert a Object to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
    * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
    *
-   * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
+   * This method is kept consistent with HiveResultSetMetaData#hiveTypeToSqlType .
    */
   public static Object toThriftPayload(Object val, ObjectInspector valOI, int version) {
     if (valOI.getCategory() == ObjectInspector.Category.PRIMITIVE) {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
index de9f4a8..ff8ac36 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
@@ -110,7 +110,7 @@ public class AvroLazyObjectInspector extends LazySimpleStructObjectInspector {
   /**
    * Set the {@link AvroSchemaRetriever} for the {@link AvroLazyObjectInspector} to the given class
    *
-   * @param scheamRetrieverClass the schema retriever class to be set
+   * @param schemaRetriever the schema retriever class to be set
    * */
   public void setSchemaRetriever(AvroSchemaRetriever schemaRetriever) {
     this.schemaRetriever = schemaRetriever;

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSchemaRetriever.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSchemaRetriever.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSchemaRetriever.java
index fcd2621..bed4de7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSchemaRetriever.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSchemaRetriever.java
@@ -22,7 +22,7 @@ import org.apache.avro.Schema;
 /**
  * Retrieves the avro schema from the given source. "Source" is a little loose term here in the
  * sense it can range from being an HDFS url location pointing to the schema or it can be even as
- * simple as a {@link Properties properties} file with a simple key-value mapping to the schema. For
+ * simple as a {@link java.util.Properties properties} file with a simple key-value mapping to the schema. For
  * cases where the {@link Schema schema} is a part of the serialized data itself, "Source" would
  * refer to the data bytes from which the {@link Schema schema} has to retrieved.
  *

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
index 36beaee..d353fab 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
@@ -88,7 +88,7 @@ public class ColumnarSerDe extends ColumnarSerDeBase {
   /**
    * Initialize the SerDe given the parameters.
    *
-   * @see AbstractSerDe#initialize(Configuration, Properties)
+   * @see org.apache.hadoop.hive.serde2.AbstractSerDe#initialize(Configuration, Properties)
    */
   @Override
   public void initialize(Configuration conf, Properties tbl) throws SerDeException {
@@ -123,7 +123,7 @@ public class ColumnarSerDe extends ColumnarSerDeBase {
    * @param objInspector
    *          The ObjectInspector for the row object
    * @return The serialized Writable object
-   * @see AbstractSerDe#serialize(Object, ObjectInspector)
+   * @see org.apache.hadoop.hive.serde2.AbstractSerDe#serialize(Object, ObjectInspector)
    */
   @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java
index fd06f58..33726d9 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStructBase.java
@@ -183,7 +183,7 @@ public abstract class ColumnarStructBase implements StructObject, SerDeStatsStru
    * @param length
    *          the length
    *
-   * @return -1 for null, >=0 for length
+   * @return -1 for null, &gt;=0 for length
    */
   protected abstract int getLength(ObjectInspector objectInspector,
       ByteArrayRef cachedByteArrayRef, int start, int length);

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java
index a15c9e1..fa1f305 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeStructBase.java
@@ -86,7 +86,7 @@ public abstract class DynamicSerDeStructBase extends DynamicSerDeTypeBase
    * @param o
    *          - this list should be in the order of the function's params for
    *          now. If we wanted to remove this requirement, we'd need to make it
-   *          a List<Pair<String, Object>> with the String being the field name.
+   *          a List&lt;Pair&lt;String, Object&gt;&gt; with the String being the field name.
    * 
    */
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
index d7fde21..00424d3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/ParseException.java
@@ -19,7 +19,7 @@ public class ParseException extends Exception {
    * to indicate that this constructor was used to create this object. This
    * constructor calls its super class with the empty string to force the
    * "toString" method of parent class "Throwable" to print the error message in
-   * the form: ParseException: <result of getMessage>
+   * the form: ParseException: &lt;result of getMessage&gt;
    */
   public ParseException(Token currentTokenVal,
       int[][] expectedTokenSequencesVal, String[] tokenImageVal) {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
index ac931d6..cb775f7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
@@ -60,11 +60,11 @@ public abstract class DeserializeRead {
    *
    * if (deserializeRead.readNextField()) {
    *   if (deserializeRead.currentExternalBufferNeeded) {
-   *     <Ensure external buffer is as least deserializeRead.currentExternalBufferNeededLen bytes>
+   *     &lt;Ensure external buffer is as least deserializeRead.currentExternalBufferNeededLen bytes&gt;
    *     deserializeRead.copyToExternalBuffer(externalBuffer, externalBufferStart);
    *   } else {
-   *     <Otherwise, field data is available in the currentBytes, currentBytesStart, and
-   *      currentBytesLength of deserializeRead>
+   *     &lt;Otherwise, field data is available in the currentBytes, currentBytesStart, and
+   *      currentBytesLength of deserializeRead&gt;
    *   }
    *
    * @param typeInfos

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index bbccc7f..af00a30 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -47,9 +47,9 @@ import org.apache.hadoop.io.WritableUtils;
  * The fractional portion is reversed, and encoded as a VInt
  * so timestamps with less precision use fewer bytes.
  *
- *      0.1    -> 1
- *      0.01   -> 10
- *      0.001  -> 100
+ *      0.1    -&gt; 1
+ *      0.01   -&gt; 10
+ *      0.001  -&gt; 100
  *
  */
 public class TimestampWritable implements WritableComparable<TimestampWritable> {
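
The hunk above only swaps the arrows for HTML entities, but the comment it touches describes a compact encoding: the fractional digits of the timestamp are reversed before being written as a VInt, so values with fewer fractional digits take fewer bytes. A toy sketch of just that digit-reversal mapping, assuming the fraction is handed over as a digit string; this is not the real TimestampWritable encoder:

  public final class FractionReversalSketch {
    // Maps the fractional digit string to the reversed integer that gets
    // VInt-encoded: "1" -> 1 (0.1), "01" -> 10 (0.01), "001" -> 100 (0.001).
    static int reversedFraction(String fractionalDigits) {
      return Integer.parseInt(new StringBuilder(fractionalDigits).reverse().toString());
    }

    public static void main(String[] args) {
      System.out.println(reversedFraction("1"));   // 1
      System.out.println(reversedFraction("01"));  // 10
      System.out.println(reversedFraction("001")); // 100
    }
  }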

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
index c00faac..ab20aaf 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
@@ -77,7 +77,7 @@ public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritabl
    * Writes a Date in SQL date format to the output stream.
    * @param out
    *          The output stream
-   * @param i
+   * @param d
    *          The Date to write
    * @throws IOException
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
index 2b940fd..f87ac76 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
@@ -85,7 +85,7 @@ public final class LazyFactory {
    * @param poi PrimitiveObjectInspector
    * @param typeBinary a switch to return either a LazyPrimtive class or it's binary
    *        companion
-   * @return LazyPrimitive<? extends ObjectInspector, ? extends Writable>
+   * @return LazyPrimitive&lt;? extends ObjectInspector, ? extends Writable&gt;
    */
   public static LazyPrimitive<? extends ObjectInspector, ? extends Writable>
   createLazyPrimitiveClass(PrimitiveObjectInspector poi, boolean typeBinary) {
@@ -201,7 +201,7 @@ public final class LazyFactory {
    * @param oi ObjectInspector
    * @param typeBinary Boolean value used as switch to return variants of LazyPrimitive
    *                   objects which are initialized from a binary format for the data.
-   * @return LazyObject<? extends ObjectInspector>
+   * @return LazyObject&lt;? extends ObjectInspector&gt;
    */
   public static LazyObject<? extends ObjectInspector>
   createLazyObject(ObjectInspector oi, boolean typeBinary) {
@@ -219,7 +219,7 @@ public final class LazyFactory {
    *
    * @param typeInfo
    *          The type information for the LazyObject
-   * @param separator
+   * @param separators
    *          The array of separators for delimiting each level
    * @param separatorIndex
    *          The current level (for separators). List(array), struct uses 1
@@ -244,7 +244,7 @@ public final class LazyFactory {
    *
    * @param typeInfo
    *          The type information for the LazyObject
-   * @param separator
+   * @param separators
    *          The array of separators for delimiting each level
    * @param separatorIndex
    *          The current level (for separators). List(array), struct uses 1
@@ -267,13 +267,12 @@ public final class LazyFactory {
    * Create a hierarchical ObjectInspector for LazyObject with the given typeInfo.
    *
    * @param typeInfo The type information for the LazyObject
-   * @param separator The array of separators for delimiting each level
+   * @param separators The array of separators for delimiting each level
    * @param separatorIndex The current level (for separators). List(array), struct uses 1 level of
    *          separator, and map uses 2 levels: the first one for delimiting entries, the second one
    *          for delimiting key and values.
    * @param nullSequence The sequence of bytes representing NULL.
    * @param extendedBooleanLiteral whether extended boolean literal set is legal
-   * @param option the {@link ObjectInspectorOption}
    * @return The ObjectInspector
    * @throws SerDeException
    */
@@ -289,13 +288,13 @@ public final class LazyFactory {
    * Create a hierarchical ObjectInspector for LazyObject with the given typeInfo.
    *
    * @param typeInfo The type information for the LazyObject
-   * @param separator The array of separators for delimiting each level
+   * @param separators The array of separators for delimiting each level
    * @param separatorIndex The current level (for separators). List(array), struct uses 1 level of
    *          separator, and map uses 2 levels: the first one for delimiting entries, the second one
    *          for delimiting key and values.
    * @param nullSequence The sequence of bytes representing NULL.
    * @param extendedBooleanLiteral whether extended boolean literal set is legal
-   * @param option the {@link ObjectInspectorOption}
+   * @param option the {@link ObjectInspectorOptions}
    * @return The ObjectInspector
    * @throws SerDeException
    */
@@ -312,12 +311,11 @@ public final class LazyFactory {
    * Create a hierarchical ObjectInspector for LazyObject with the given typeInfo.
    *
    * @param typeInfo The type information for the LazyObject
-   * @param separator The array of separators for delimiting each level
    * @param separatorIndex The current level (for separators). List(array), struct uses 1 level of
    *          separator, and map uses 2 levels: the first one for delimiting entries, the second one
    *          for delimiting key and values.
    * @param lazyParams Params for lazy types
-   * @param option the {@link ObjectInspectorOption}
+   * @param option the {@link ObjectInspectorOptions}
    * @return The ObjectInspector
    * @throws SerDeException
    */
@@ -420,9 +418,6 @@ public final class LazyFactory {
    * Create a hierarchical ObjectInspector for LazyStruct with the given
    * columnNames and columnTypeInfos.
    *
-   * @param lastColumnTakesRest
-   *          whether the last column of the struct should take the rest of the
-   *          row if there are extra fields.
    * @param lazyParams  parameters for the lazy types
    * @throws SerDeException
    * @see LazyFactory#createLazyObjectInspector(TypeInfo, byte[], int, Text,

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
index 4d2ff22..9378cd1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java
@@ -126,7 +126,7 @@ public class LazyHiveDecimal extends LazyPrimitive<LazyHiveDecimalObjectInspecto
   /**
    * Writes HiveDecimalWritable object to output stream as string
    * @param outputStream
-   * @param hiveDecimal
+   * @param hiveDecimalWritable
    * @throws IOException
    */
   public static void writeUTF8(

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
index 17ecff1..c692c39 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
@@ -105,7 +105,7 @@ public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
    * ","-separated column names columns.types: ",", ":", or ";"-separated column
    * types
    *
-   * @see AbstractSerDe#initialize(Configuration, Properties)
+   * @see  org.apache.hadoop.hive.serde2.AbstractSerDe#initialize(Configuration, Properties)
    */
   @Override
   public void initialize(Configuration job, Properties tbl)
@@ -141,7 +141,7 @@ public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
    * @param field
    *          the Writable that contains the data
    * @return The deserialized row Object.
-   * @see AbstractSerDe#deserialize(Writable)
+   * @see  org.apache.hadoop.hive.serde2.AbstractSerDe#deserialize(Writable)
    */
   @Override
   public Object doDeserialize(Writable field) throws SerDeException {
@@ -167,7 +167,7 @@ public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
   /**
    * Returns the Writable Class after serialization.
    *
-   * @see AbstractSerDe#getSerializedClass()
+   * @see org.apache.hadoop.hive.serde2.AbstractSerDe#getSerializedClass()
    */
   @Override
   public Class<? extends Writable> getSerializedClass() {
@@ -185,8 +185,8 @@ public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
    * @param objInspector
    *          The ObjectInspector for the row object
    * @return The serialized Writable object
-   * @throws IOException
-   * @see AbstractSerDe#serialize(Object, ObjectInspector)
+   * @throws SerDeException
+   * @see org.apache.hadoop.hive.serde2.AbstractSerDe#serialize(Object, ObjectInspector)
    */
   @Override
   public Writable doSerialize(Object obj, ObjectInspector objInspector)
@@ -422,7 +422,7 @@ public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
 
   /**
    * This class is deprecated and is only used for backward compatibility. Replace by
-   * @see org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters.
+   * @see org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters .
    */
   @Deprecated
   public static class SerDeParameters extends org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters {

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java
index fee1472..6b1288c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
- * LazyBinaryArray is serialized as follows: start A b b b b b b end bytes[] ->
+ * LazyBinaryArray is serialized as follows: start A b b b b b b end bytes[] -&gt;
  * |--------|---|---|---|---| ... |---|---|
  * 
  * Section A is the null-bytes. Suppose the list has N elements, then there are

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
index 1aa72ce..80e1e46 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 
 /**
- * LazyBinaryMap is serialized as follows: start A b c b c b c end bytes[] ->
+ * LazyBinaryMap is serialized as follows: start A b c b c b c end bytes[] -&gt;
  * |--------|---|---|---|---| ... |---|---|
  * 
  * Section A is the null-bytes. Suppose the map has N key-value pairs, then

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 7cdedd6..268f374 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -353,7 +353,7 @@ public class LazyBinarySerDe extends AbstractSerDe {
    * @param byteStream
    * @param dec
    * @param scratchLongs
-   * @param buffer
+   * @param scratchBytes
    */
   public static void writeToByteStream(
       RandomAccessOutput byteStream,
@@ -379,9 +379,9 @@ public class LazyBinarySerDe extends AbstractSerDe {
   * And, allocate scratch buffer with HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES bytes.
   *
   * @param byteStream
-  * @param dec
+  * @param decWritable
   * @param scratchLongs
-  * @param buffer
+  * @param scratchBytes
   */
   public static void writeToByteStream(
       RandomAccessOutput byteStream,

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryString.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryString.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryString.java
index 9f691d7..4f1ae56 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryString.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryString.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObj
 import org.apache.hadoop.io.Text;
 
 /**
- * The serialization of LazyBinaryString is very simple: start A end bytes[] ->
+ * The serialization of LazyBinaryString is very simple: start A end bytes[] -&gt;
  * |---------------------------------|
  * 
  * Section A is just an array of bytes which are exactly the Text contained in

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
index b4eb7bb..f594484 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BinaryComparable;
 
 /**
- * LazyBinaryStruct is serialized as follows: start A B A B A B end bytes[] ->
+ * LazyBinaryStruct is serialized as follows: start A B A B A B end bytes[] -&gt;
  * |-----|---------|--- ... ---|-----|---------|
  *
  * Section A is one null-byte, corresponding to eight struct fields in Section

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java
index fbf05b6..819fbab 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java
@@ -29,7 +29,7 @@
   import org.apache.hadoop.hive.serde2.objectinspector.*;
 
 /**
- * LazyBinaryUnion is serialized as follows: start TAG FIELD end bytes[] ->
+ * LazyBinaryUnion is serialized as follows: start TAG FIELD end bytes[] -&gt;
  * |-----|---------|--- ... ---|-----|---------|
  *
  * Section TAG is one byte, corresponding to tag of set union field

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
index 5666516..4c67259 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
@@ -100,7 +100,7 @@ public final class LazyBinaryUtils {
   /**
    * Record is the unit that data is serialized in. A record includes two parts.
    * The first part stores the size of the element and the second part stores
-   * the real element. size element record -> |----|-------------------------|
+   * the real element. size element record -&gt; |----|-------------------------|
    *
    * A RecordInfo stores two information of a record, the size of the "size"
    * part which is the element offset and the size of the element part which is

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java
index d90560b..7238fbc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspector.java
@@ -50,7 +50,7 @@ public interface ObjectInspector extends Cloneable {
    * ObjectInspector. This is used to display the type information to the user.
    *
    * For primitive types, the type name is standardized. For other types, the
-   * type name can be something like "list<int>", "map<int,string>", java class
+   * type name can be something like "list&lt;int&gt;", "map&lt;int,string&gt;", java class
    * names, or user-defined type names similar to typedef.
    */
   String getTypeName();

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 697d22e..0d6fd4a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -1385,8 +1385,8 @@ public final class ObjectInspectorUtils {
    *
    * @param oi - Input object inspector
    * @param oiSettableProperties - Lookup map to cache the result.(If no caching, pass null)
-   * @return - true if : (1) oi is an instance of settable<DataType>OI.
-   *                     (2) All the embedded object inspectors are instances of settable<DataType>OI.
+   * @return - true if : (1) oi is an instance of settable&lt;DataType&gt;OI.
+   *                     (2) All the embedded object inspectors are instances of settable&lt;DataType&gt;OI.
    *           If (1) or (2) is false, return false.
    */
   public static boolean hasAllFieldsSettable(ObjectInspector oi,

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index 9ea6609..2425c30 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -280,7 +280,7 @@ public final class PrimitiveObjectInspectorFactory {
 
   /**
    * Returns the PrimitiveWritableObjectInspector for the given type info
-   * @param PrimitiveTypeInfo    PrimitiveTypeInfo instance
+   * @param typeInfo PrimitiveTypeInfo instance
    * @return AbstractPrimitiveWritableObjectInspector instance
    */
   public static AbstractPrimitiveWritableObjectInspector getPrimitiveWritableObjectInspector(
@@ -316,7 +316,7 @@ public final class PrimitiveObjectInspectorFactory {
    * Returns a PrimitiveWritableObjectInspector which implements ConstantObjectInspector
    * for the PrimitiveCategory.
    *
-   * @param primitiveCategory
+   * @param typeInfo
    * @param value
    */
   public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspector(
@@ -385,7 +385,7 @@ public final class PrimitiveObjectInspectorFactory {
 
   /**
    * Returns the PrimitiveJavaObjectInspector for the given PrimitiveTypeInfo instance,
-   * @param PrimitiveTypeInfo    PrimitiveTypeInfo instance
+   * @param typeInfo PrimitiveTypeInfo instance
    * @return AbstractPrimitiveJavaObjectInspector instance
    */
   public static AbstractPrimitiveJavaObjectInspector getPrimitiveJavaObjectInspector(

http://git-wip-us.apache.org/repos/asf/hive/blob/23e703f9/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
index 54964e4..9f083ec 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
@@ -158,7 +158,7 @@ public final class TypeInfoUtils {
 
   /**
    * Returns the array element type, if the Type is an array (Object[]), or
-   * GenericArrayType (Map<String,String>[]). Otherwise return null.
+   * GenericArrayType (Map&lt;String,String&gt;[]). Otherwise return null.
    */
   public static Type getArrayElementType(Type t) {
     if (t instanceof Class && ((Class<?>) t).isArray()) {
@@ -176,7 +176,7 @@ public final class TypeInfoUtils {
    *
    * @param size
    *          In case the last parameter of Method is an array, we will try to
-   *          return a List<TypeInfo> with the specified size by repeating the
+   *          return a List&lt;TypeInfo&gt; with the specified size by repeating the
    *          element of the array at the end. In case the size is smaller than
    *          the minimum possible number of arguments for the method, null will
    *          be returned.


[39/50] [abbrv] hive git commit: HIVE-16324: Truncate table should not work when EXTERNAL property of table is true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)

Posted by we...@apache.org.
HIVE-16324: Truncate table should not work when EXTERNAL property of table is true (Vihang Karajgaonkar via Aihua Xu, reviewed by Thejas M Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7d4554dd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7d4554dd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7d4554dd

Branch: refs/heads/hive-14535
Commit: 7d4554dd1def433e0439cdbe7dfa665b6909e706
Parents: 413245e
Author: Aihua Xu <ai...@apache.org>
Authored: Mon May 15 16:08:50 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon May 15 16:08:50 2017 -0400

----------------------------------------------------------------------
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  6 +++---
 .../hadoop/hive/metastore/cache/CachedStore.java   |  2 +-
 .../clientnegative/truncate_table_failure5.q       |  5 +++++
 .../clientnegative/truncate_table_failure6.q       |  5 +++++
 .../clientnegative/truncate_table_failure5.q.out   | 17 +++++++++++++++++
 .../clientnegative/truncate_table_failure6.q.out   | 17 +++++++++++++++++
 6 files changed, 48 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index ed19f42..ee48617 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -1155,7 +1155,7 @@ public class ObjectStore implements RawStore, Configurable {
     return mConstraints;
   }
 
-@Override
+  @Override
   public Table getTable(String dbName, String tableName) throws MetaException {
     boolean commited = false;
     Table tbl = null;
@@ -1434,7 +1434,7 @@ public class ObjectStore implements RawStore, Configurable {
       // for backwards compatibility with old metastore persistence
       if (mtbl.getViewOriginalText() != null) {
         tableType = TableType.VIRTUAL_VIEW.toString();
-      } else if ("TRUE".equals(mtbl.getParameters().get("EXTERNAL"))) {
+      } else if (Boolean.parseBoolean(mtbl.getParameters().get("EXTERNAL"))) {
         tableType = TableType.EXTERNAL_TABLE.toString();
       } else {
         tableType = TableType.MANAGED_TABLE.toString();
@@ -1466,7 +1466,7 @@ public class ObjectStore implements RawStore, Configurable {
     // If the table has property EXTERNAL set, update table type
     // accordingly
     String tableType = tbl.getTableType();
-    boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
+    boolean isExternal = Boolean.parseBoolean(tbl.getParameters().get("EXTERNAL"));
     if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
       if (isExternal) {
         tableType = TableType.EXTERNAL_TABLE.toString();
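
The hunks above replace the exact "TRUE" string match with Boolean.parseBoolean, so any capitalization of "true" in the EXTERNAL table property now marks the table as external. A minimal HiveQL sketch of the resulting behavior (the table name t1 is illustrative, not part of the patch):

    create table t1 (col1 int, col2 string);
    alter table t1 set tblproperties ('EXTERNAL'='True');  -- mixed case
    -- With this patch the table is treated as EXTERNAL regardless of the
    -- property value's case, so the statement below fails with
    -- SemanticException [Error 10146]; the old code only recognized the
    -- exact value 'TRUE' at this point.
    truncate table t1;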

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index 5a187d8..1cc838f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -447,7 +447,7 @@ public class CachedStore implements RawStore, Configurable {
     // If the table has property EXTERNAL set, update table type
     // accordingly
     String tableType = tbl.getTableType();
-    boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
+    boolean isExternal = Boolean.parseBoolean(tbl.getParameters().get("EXTERNAL"));
     if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
       if (isExternal) {
         tableType = TableType.EXTERNAL_TABLE.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/ql/src/test/queries/clientnegative/truncate_table_failure5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/truncate_table_failure5.q b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
new file mode 100644
index 0000000..efabb34
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/truncate_table_failure5.q
@@ -0,0 +1,5 @@
+create table external1 (col1 int, col2 string);
+alter table external1 set tblproperties ('EXTERNAL'='true');
+
+-- truncate on a non-managed table should throw exception
+truncate table external1;

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/ql/src/test/queries/clientnegative/truncate_table_failure6.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/truncate_table_failure6.q b/ql/src/test/queries/clientnegative/truncate_table_failure6.q
new file mode 100644
index 0000000..b73ec63
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/truncate_table_failure6.q
@@ -0,0 +1,5 @@
+create table external1 (col1 int, col2 string);
+alter table external1 set tblproperties ('EXTERNAL'='TRUE');
+
+-- truncate on external table should throw exception. Property value of 'EXTERNAL' is not case sensitive
+truncate table external1;

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/ql/src/test/results/clientnegative/truncate_table_failure5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/truncate_table_failure5.q.out b/ql/src/test/results/clientnegative/truncate_table_failure5.q.out
new file mode 100644
index 0000000..d2f625d
--- /dev/null
+++ b/ql/src/test/results/clientnegative/truncate_table_failure5.q.out
@@ -0,0 +1,17 @@
+PREHOOK: query: create table external1 (col1 int, col2 string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@external1
+POSTHOOK: query: create table external1 (col1 int, col2 string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@external1
+PREHOOK: query: alter table external1 set tblproperties ('EXTERNAL'='true')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@external1
+PREHOOK: Output: default@external1
+POSTHOOK: query: alter table external1 set tblproperties ('EXTERNAL'='true')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@external1
+POSTHOOK: Output: default@external1
+FAILED: SemanticException [Error 10146]: Cannot truncate non-managed table external1.

http://git-wip-us.apache.org/repos/asf/hive/blob/7d4554dd/ql/src/test/results/clientnegative/truncate_table_failure6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/truncate_table_failure6.q.out b/ql/src/test/results/clientnegative/truncate_table_failure6.q.out
new file mode 100644
index 0000000..88987f5
--- /dev/null
+++ b/ql/src/test/results/clientnegative/truncate_table_failure6.q.out
@@ -0,0 +1,17 @@
+PREHOOK: query: create table external1 (col1 int, col2 string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@external1
+POSTHOOK: query: create table external1 (col1 int, col2 string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@external1
+PREHOOK: query: alter table external1 set tblproperties ('EXTERNAL'='TRUE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@external1
+PREHOOK: Output: default@external1
+POSTHOOK: query: alter table external1 set tblproperties ('EXTERNAL'='TRUE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@external1
+POSTHOOK: Output: default@external1
+FAILED: SemanticException [Error 10146]: Cannot truncate non-managed table external1.


[20/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
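
The explain-plan diffs below show the effect of shared scans: TableScan operators that read the same table in different branches of a query (previously separate vertices such as Map 7, Map 9, or Map 11) are merged into a single Map vertex with multiple operator trees, and the remaining vertices are renumbered. A rough HiveQL sketch of the kind of query affected (assuming the hive.optimize.shared.work property that gates the shared work optimizer; the query itself is illustrative and not taken from the patch):

    set hive.optimize.shared.work=true;
    -- Both join inputs scan the same table, so with shared scans Tez can
    -- serve them from one Map vertex instead of two.
    explain
    select a.key, b.value
    from src a join src b on a.key = b.key;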
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_views.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_views.q.out b/ql/src/test/results/clientpositive/llap/subquery_views.q.out
index bfd56e6..191de4c 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_views.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_views.q.out
@@ -124,17 +124,17 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
-        Reducer 12 <- Map 11 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-        Reducer 14 <- Map 13 (SIMPLE_EDGE)
-        Reducer 16 <- Map 15 (SIMPLE_EDGE)
-        Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
-        Reducer 19 <- Map 18 (SIMPLE_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 10 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 10 <- Reducer 13 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+        Reducer 12 <- Map 11 (SIMPLE_EDGE)
+        Reducer 13 <- Map 11 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 12 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 7 <- Map 1 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 8 <- Map 1 (SIMPLE_EDGE)
+        Reducer 9 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -156,34 +156,6 @@ STAGE PLANS:
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  properties:
-                    insideView TRUE
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (key < '11') (type: boolean)
-                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string), _col1 (type: string)
-                        sort order: ++
-                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 13 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key = key) and (value > 'val_11')) (type: boolean)
                     Statistics: Num rows: 41 Data size: 7298 Basic stats: COMPLETE Column stats: COMPLETE
@@ -199,13 +171,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint), _col3 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 15 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key = key) and (value > 'val_11')) (type: boolean)
                     Statistics: Num rows: 41 Data size: 7298 Basic stats: COMPLETE Column stats: COMPLETE
@@ -219,32 +184,18 @@ STAGE PLANS:
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 18 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  properties:
-                    insideView TRUE
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: key (type: string)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key < '11') (type: boolean)
+                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key = key) and (value > 'val_11')) (type: boolean)
                     Statistics: Num rows: 41 Data size: 7298 Basic stats: COMPLETE Column stats: COMPLETE
@@ -260,13 +211,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col2 (type: bigint), _col3 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: ((value = value) and (key = key) and (value > 'val_11')) (type: boolean)
                     Statistics: Num rows: 41 Data size: 7298 Basic stats: COMPLETE Column stats: COMPLETE
@@ -282,7 +226,7 @@ STAGE PLANS:
                         Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 11 
             Map Operator Tree:
                 TableScan
                   alias: b
@@ -299,87 +243,21 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: key (type: string)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 12 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                     Left Outer Join0 to 2
-                keys:
-                  0 _col0 (type: string), _col1 (type: string)
-                  1 _col0 (type: string), _col1 (type: string)
-                  2 _col4 (type: string), _col2 (type: string)
-                outputColumnNames: _col0, _col4, _col5, _col9
-                Statistics: Num rows: 1 Data size: 107 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: CASE WHEN ((_col4 = 0)) THEN (true) WHEN (_col4 is null) THEN (true) WHEN (_col9 is not null) THEN (false) WHEN (_col0 is null) THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
-                  Statistics: Num rows: 1 Data size: 107 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: string)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: _col0 (type: string)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 14 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0), count(VALUE._col1)
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string)
-                  sort order: ++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                  Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col2 (type: bigint), _col3 (type: bigint)
-        Reducer 16 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string), true (type: boolean)
-                  outputColumnNames: _col0, _col2, _col3
-                  Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col2 (type: string), _col3 (type: boolean)
-        Reducer 17 
-            Execution mode: llap
-            Reduce Operator Tree:
               Merge Join Operator
                 condition map:
                      Inner Join 0 to 1
@@ -394,7 +272,20 @@ STAGE PLANS:
                   Map-reduce partition columns: _col4 (type: string), _col2 (type: string)
                   Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col3 (type: boolean)
-        Reducer 19 
+        Reducer 12 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 13 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -451,7 +342,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -466,7 +357,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                   Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col2 (type: bigint), _col3 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -484,7 +375,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col2 (type: string), _col3 (type: boolean)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -501,6 +392,69 @@ STAGE PLANS:
                   Map-reduce partition columns: _col4 (type: string), _col2 (type: string)
                   Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col3 (type: boolean)
+        Reducer 7 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                     Left Outer Join0 to 2
+                keys:
+                  0 _col0 (type: string), _col1 (type: string)
+                  1 _col0 (type: string), _col1 (type: string)
+                  2 _col4 (type: string), _col2 (type: string)
+                outputColumnNames: _col0, _col4, _col5, _col9
+                Statistics: Num rows: 1 Data size: 107 Basic stats: COMPLETE Column stats: COMPLETE
+                Filter Operator
+                  predicate: CASE WHEN ((_col4 = 0)) THEN (true) WHEN (_col4 is null) THEN (true) WHEN (_col9 is not null) THEN (false) WHEN (_col0 is null) THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
+                  Statistics: Num rows: 1 Data size: 107 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: _col0 (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      keys: _col0 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), count(VALUE._col1)
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string)
+                  sort order: ++
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                  Statistics: Num rows: 20 Data size: 3880 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col2 (type: bigint), _col3 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string), true (type: boolean)
+                  outputColumnNames: _col0, _col2, _col3
+                  Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 20 Data size: 3640 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col2 (type: string), _col3 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/tez_join_tests.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_join_tests.q.out b/ql/src/test/results/clientpositive/llap/tez_join_tests.q.out
index 4fa5854..13da5ad 100644
--- a/ql/src/test/results/clientpositive/llap/tez_join_tests.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_join_tests.q.out
@@ -15,7 +15,7 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 7 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -34,6 +34,16 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: string)
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
         Map 6 
@@ -52,23 +62,6 @@ STAGE PLANS:
                       Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: c
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col1 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col1 (type: string)
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/tez_joins_explain.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/tez_joins_explain.q.out b/ql/src/test/results/clientpositive/llap/tez_joins_explain.q.out
index b32e990..16e0032 100644
--- a/ql/src/test/results/clientpositive/llap/tez_joins_explain.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_joins_explain.q.out
@@ -15,7 +15,7 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Map 7 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -34,6 +34,16 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: string)
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
         Map 6 
@@ -52,23 +62,6 @@ STAGE PLANS:
                       Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: c
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col1 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col1 (type: string)
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
index a252c74..b6271a7 100644
--- a/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/unionDistinct_1.q.out
@@ -33,11 +33,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 6 <- Union 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
+        Reducer 7 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
+        Reducer 9 <- Map 8 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -56,13 +56,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s2
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
@@ -76,10 +69,10 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
-                  alias: s3
+                  alias: s2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
@@ -94,28 +87,6 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: 'tst3' (type: string), _col0 (type: bigint)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: _col0 (type: string), _col1 (type: bigint)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: bigint)
-                      sort order: ++
-                      Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
-                      Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -176,7 +147,29 @@ STAGE PLANS:
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: default.tmptable
-        Reducer 8 
+        Reducer 7 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: 'tst3' (type: string), _col0 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: _col0 (type: string), _col1 (type: bigint)
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: bigint)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
+                      Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 9 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -273,11 +266,11 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 6 <- Union 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
+        Reducer 7 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
+        Reducer 9 <- Map 8 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -296,13 +289,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s2
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
@@ -316,10 +302,10 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
-                  alias: s3
+                  alias: s2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
@@ -334,28 +320,6 @@ STAGE PLANS:
                         value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: 'tst3' (type: string), _col0 (type: bigint)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: _col0 (type: string), _col1 (type: bigint)
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: bigint)
-                      sort order: ++
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -421,7 +385,29 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 8 
+        Reducer 7 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: 'tst3' (type: string), _col0 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: _col0 (type: string), _col1 (type: bigint)
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: bigint)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 9 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5386,13 +5372,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Map 11 <- Union 9 (CONTAINS)
-        Map 6 <- Union 3 (CONTAINS)
-        Reducer 10 <- Union 9 (SIMPLE_EDGE)
+        Map 10 <- Union 7 (CONTAINS)
+        Map 9 <- Union 3 (CONTAINS)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE)
-        Reducer 5 <- Reducer 10 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE), Union 9 (CONTAINS)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 6 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 7 (CONTAINS)
+        Reducer 8 <- Union 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5409,9 +5395,18 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: bigint)
+                  Group By Operator
+                    aggregations: count(1)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 11 
+        Map 10 
             Map Operator Tree:
                 TableScan
                   alias: s4
@@ -5435,7 +5430,7 @@ STAGE PLANS:
                           Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 6 
+        Map 9 
             Map Operator Tree:
                 TableScan
                   alias: s2
@@ -5459,36 +5454,6 @@ STAGE PLANS:
                           Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s3
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: count(1)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string), KEY._col1 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col1 (type: string)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -5543,7 +5508,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5565,10 +5530,24 @@ STAGE PLANS:
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                       Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
+        Reducer 8 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 83 Data size: 22576 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col1 (type: string)
         Union 3 
             Vertex: Union 3
-        Union 9 
-            Vertex: Union 9
+        Union 7 
+            Vertex: Union 7
 
   Stage: Stage-0
     Fetch Operator
@@ -11368,13 +11347,13 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
-        Reducer 12 <- Map 11 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
-        Reducer 14 <- Map 13 (CUSTOM_SIMPLE_EDGE), Union 7 (CONTAINS)
+        Reducer 10 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 7 (CONTAINS)
+        Reducer 12 <- Map 11 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
         Reducer 4 <- Union 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
         Reducer 6 <- Union 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
         Reducer 8 <- Union 7 (SIMPLE_EDGE)
+        Reducer 9 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 5 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -11390,13 +11369,6 @@ STAGE PLANS:
                       Reduce Output Operator
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                     Limit
@@ -11405,13 +11377,6 @@ STAGE PLANS:
                       Reduce Output Operator
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 13 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
                     Limit
@@ -11422,7 +11387,7 @@ STAGE PLANS:
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 11 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -11444,7 +11409,7 @@ STAGE PLANS:
                 Number of rows: 1
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 2 (type: int)
+                  expressions: 4 (type: int)
                   outputColumnNames: _col0
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -11464,27 +11429,7 @@ STAGE PLANS:
                 Number of rows: 1
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: 3 (type: int)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    keys: _col0 (type: int)
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
-                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 14 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Limit
-                Number of rows: 1
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: 4 (type: int)
+                  expressions: 2 (type: int)
                   outputColumnNames: _col0
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
@@ -11568,6 +11513,26 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Limit
+                Number of rows: 1
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: 3 (type: int)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    keys: _col0 (type: int)
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: int)
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
         Union 3 
             Vertex: Union 3
         Union 5 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/union_top_level.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/union_top_level.q.out b/ql/src/test/results/clientpositive/llap/union_top_level.q.out
index 52926b6..2fac8cc 100644
--- a/ql/src/test/results/clientpositive/llap/union_top_level.q.out
+++ b/ql/src/test/results/clientpositive/llap/union_top_level.q.out
@@ -208,10 +208,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Union 4 (CONTAINS)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
-        Reducer 8 <- Reducer 7 (SIMPLE_EDGE), Union 4 (CONTAINS)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE), Union 4 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -232,32 +232,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: s2
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -273,7 +247,7 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 7 
             Map Operator Tree:
                 TableScan
                   alias: s1
@@ -290,6 +264,18 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 2 
@@ -330,7 +316,7 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Merge Join Operator
@@ -351,7 +337,7 @@ STAGE PLANS:
                     Statistics: Num rows: 1219 Data size: 216982 Basic stats: COMPLETE Column stats: COMPLETE
                     TopN Hash Memory Usage: 0.1
                     value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out
index 0175c38..e1ad06c 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out
@@ -53,8 +53,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -77,13 +77,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                         Statistics: Num rows: 8 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col3 (type: bigint)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: t1
-                  Statistics: Num rows: 6 Data size: 1530 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (UDFToDouble(a) < 3.0) (type: boolean)
                     Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: NONE
@@ -142,7 +135,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: vectorized, llap
             Reduce Operator Tree:
               Group By Operator
@@ -197,8 +190,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -221,13 +214,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                         Statistics: Num rows: 8 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col3 (type: bigint)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: t1
-                  Statistics: Num rows: 6 Data size: 1530 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (UDFToDouble(a) < 3.0) (type: boolean)
                     Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: NONE
@@ -286,7 +272,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: vectorized, llap
             Reduce Operator Tree:
               Group By Operator
@@ -372,9 +358,9 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
+        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -397,13 +383,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                         Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col2 (type: bigint)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: t1
-                  Statistics: Num rows: 6 Data size: 1530 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (UDFToDouble(a) < 3.0) (type: boolean)
                     Statistics: Num rows: 2 Data size: 510 Basic stats: COMPLETE Column stats: NONE
@@ -477,7 +456,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 6 
+        Reducer 5 
             Execution mode: vectorized, llap
             Reduce Operator Tree:
               Group By Operator
@@ -492,7 +471,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 8 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
-        Reducer 7 
+        Reducer 6 
             Execution mode: vectorized, llap
             Reduce Operator Tree:
               Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
index e412844..bbcb010 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out
@@ -26,10 +26,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Map 1 <- Reducer 4 (BROADCAST_EDGE), Reducer 6 (BROADCAST_EDGE)
+        Map 1 <- Reducer 4 (BROADCAST_EDGE), Reducer 5 (BROADCAST_EDGE)
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
         Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
-        Reducer 6 <- Map 5 (SIMPLE_EDGE)
+        Reducer 5 <- Map 3 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -74,7 +74,7 @@ STAGE PLANS:
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
                         outputColumnNames: _col0, _col1, _col2, _col3, _col5
                         input vertices:
-                          1 Reducer 6
+                          1 Reducer 5
                         Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: COMPLETE
                         Filter Operator
                           Filter Vectorization:
@@ -145,24 +145,6 @@ STAGE PLANS:
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: vectorized, llap
-            LLAP IO: no inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
-                allNative: false
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Group By Operator
                     Group By Vectorization:
                         className: VectorGroupByOperator
@@ -250,7 +232,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 6 
+        Reducer 5 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/vector_join30.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_join30.q.out b/ql/src/test/results/clientpositive/llap/vector_join30.q.out
index 280f92d..16782d9 100644
--- a/ql/src/test/results/clientpositive/llap/vector_join30.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_join30.q.out
@@ -775,10 +775,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE), Reducer 7 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (BROADCAST_EDGE), Reducer 6 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -854,24 +854,6 @@ STAGE PLANS:
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -940,7 +922,7 @@ STAGE PLANS:
                   outputColumnNames: _col2, _col3
                   input vertices:
                     1 Reducer 5
-                    2 Reducer 7
+                    2 Reducer 6
                   Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: sum(hash(_col2,_col3))
@@ -1020,7 +1002,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
-        Reducer 7 
+        Reducer 6 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -1115,10 +1097,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1180,24 +1162,6 @@ STAGE PLANS:
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1337,7 +1301,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -1432,10 +1396,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1497,24 +1461,6 @@ STAGE PLANS:
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1654,7 +1600,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -1749,10 +1695,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1814,24 +1760,6 @@ STAGE PLANS:
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1971,7 +1899,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -2066,10 +1994,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
         Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2131,24 +2059,6 @@ STAGE PLANS:
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
-            Execution mode: vectorized, llap
-            LLAP IO: all inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
-                  TableScan Vectorization:
-                      native: true
-                      projectedOutputColumns: [0, 1]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -2288,7 +2198,7 @@ STAGE PLANS:
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                   Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
-        Reducer 8 
+        Reducer 7 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
index 3967d11..d776214 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
@@ -2375,9 +2375,9 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2386,39 +2386,27 @@ STAGE PLANS:
                   alias: srcpart
                   filterExpr: (ds = '2008-04-08') (type: boolean)
                   Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
+                  Filter Operator
+                    predicate: (ds = '2008-04-08') (type: boolean)
+                    Select Operator
                       Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: vectorized, llap
-            LLAP IO: no inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  filterExpr: (ds = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      keys: '2008-04-08' (type: string)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
+                        sort order: 
+                        Statistics: Num rows: 1000 Data size: 94000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (ds = '2008-04-08') (type: boolean)
+                    Select Operator
+                      Statistics: Num rows: 1000 Data size: 18624 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        keys: '2008-04-08' (type: string)
+                        mode: hash
+                        outputColumnNames: _col0
                         Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
             Map Vectorization:
@@ -2470,7 +2458,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -6456,10 +6444,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE), Union 4 (CONTAINS)
-        Reducer 5 <- Map 1 (BROADCAST_EDGE), Union 4 (SIMPLE_EDGE)
-        Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE), Union 4 (CONTAINS)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
+        Reducer 4 <- Map 1 (BROADCAST_EDGE), Union 3 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
+        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE), Union 3 (CONTAINS)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -6468,30 +6456,18 @@ STAGE PLANS:
                   alias: srcpart
                   filterExpr: ds is not null (type: boolean)
                   Statistics: Num rows: 2000 Data size: 389248 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: ds (type: string)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: ds is not null (type: boolean)
                     Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
+                    Select Operator
+                      expressions: ds (type: string)
+                      outputColumnNames: _col0
                       Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: vectorized, llap
-            LLAP IO: no inputs
-            Map Vectorization:
-                enabled: true
-                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
-                groupByVectorOutput: true
-                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
-                allNative: true
-                usesVectorUDFAdaptor: false
-                vectorized: true
-        Map 2 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  Statistics: Num rows: 2000 Data size: 389248 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 2000 Data size: 368000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ds (type: string)
                     outputColumnNames: ds
@@ -6515,7 +6491,7 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
-        Map 7 
+        Map 6 
             Map Operator Tree:
                 TableScan
                   alias: srcpart
@@ -6543,7 +6519,7 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
-        Reducer 3 
+        Reducer 2 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -6571,7 +6547,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 5 
+        Reducer 4 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -6606,7 +6582,7 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 6 
+        Reducer 5 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -6628,7 +6604,7 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 8 
+        Reducer 7 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
@@ -6656,8 +6632,8 @@ STAGE PLANS:
                       sort order: +
                       Map-reduce partition columns: _col0 (type: string)
                       Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-        Union 4 
-            Vertex: Union 4
+        Union 3 
+            Vertex: Union 3
 
   Stage: Stage-0
     Fetch Operator


[36/50] [abbrv] hive git commit: HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID (Peter Vary, reviewed by Aihua Xu)

Posted by we...@apache.org.
HIVE-16607: ColumnStatsAutoGatherContext regenerates HiveConf.HIVEQUERYID (Peter Vary, reviewed by Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/86f74fdd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/86f74fdd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/86f74fdd

Branch: refs/heads/hive-14535
Commit: 86f74fdd26b83cee128d24055ee369e9b7f36201
Parents: 455ffdd
Author: Aihua Xu <ai...@apache.org>
Authored: Mon May 15 11:12:04 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon May 15 11:36:22 2017 -0400

----------------------------------------------------------------------
 .../mapreduce/TestHCatMultiOutputFormat.java    |   2 +-
 .../test/resources/testconfiguration.properties |   1 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   4 +-
 .../java/org/apache/hive/beeline/QFile.java     |  20 +-
 .../apache/hive/beeline/QFileBeeLineClient.java |   8 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  28 +-
 .../org/apache/hadoop/hive/ql/QueryState.java   | 146 ++++++---
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   2 +-
 .../ql/io/rcfile/stats/PartialScanTask.java     |   3 +-
 .../metadata/HiveMaterializedViewsRegistry.java |   5 +-
 .../ql/parse/ColumnStatsAutoGatherContext.java  |   9 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java     |   3 +-
 .../ql/parse/TestMacroSemanticAnalyzer.java     |   2 +-
 .../hadoop/hive/ql/parse/TestQBCompact.java     |   2 +-
 .../ql/parse/TestQBJoinTreeApplyPredicate.java  |   3 +-
 .../hadoop/hive/ql/parse/TestQBSubQuery.java    |   3 +-
 .../parse/TestReplicationSemanticAnalyzer.java  |   3 +-
 .../ql/parse/TestSemanticAnalyzerFactory.java   |   2 +-
 .../parse/TestUpdateDeleteSemanticAnalyzer.java |   2 +-
 .../TestHiveAuthorizationTaskFactory.java       |   2 +-
 .../parse/authorization/TestPrivilegesV1.java   |   4 +-
 .../parse/authorization/TestPrivilegesV2.java   |   2 +-
 .../materialized_view_create_rewrite.q.out      | 322 +++++++++++++++++++
 .../hive/service/cli/operation/Operation.java   |  12 +-
 .../service/cli/operation/SQLOperation.java     |   6 +-
 25 files changed, 498 insertions(+), 98 deletions(-)
----------------------------------------------------------------------
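
The hunks below replace direct construction of QueryState (new QueryState(conf)) with the
QueryState.Builder API, and have callers such as Driver read the query id back from the
QueryState instead of re-reading HiveConf.ConfVars.HIVEQUERYID. A minimal sketch of the new
call pattern, using only the builder methods that appear in the diffs; the wrapper class and
main method here are illustrative and not part of the patch:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.QueryState;

    public class QueryStateBuilderSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // Previously: new QueryState(conf). The builder now derives the per-query conf
        // and, when asked, generates a fresh query id instead of reusing whatever
        // HIVEQUERYID the session conf already carries.
        QueryState queryState = new QueryState.Builder()
            .withGenerateNewQueryId(true)
            .withHiveConf(conf)
            .build();

        // Callers take the id from the QueryState itself rather than from the HiveConf.
        String queryId = queryState.getQueryId();
        System.out.println(queryId);
      }
    }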


http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
index 6ff48ee..180e802 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
@@ -356,7 +356,7 @@ public class TestHCatMultiOutputFormat {
    * @throws Exception if any error occurs
    */
   private List<String> getTableData(String table, String database) throws Exception {
-    QueryState queryState = new QueryState(null);
+    QueryState queryState = new QueryState.Builder().build();
     HiveConf conf = queryState.getConf();
     conf.addResource("hive-site.xml");
     ArrayList<String> results = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index a378a5d..51385cf 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -756,6 +756,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
 beeline.positive.include=drop_with_concurrency.q,\
   escape_comments.q,\
   mapjoin2.q,\
+  materialized_view_create_rewrite.q,\
   smb_mapjoin_1.q,\
   smb_mapjoin_10.q,\
   smb_mapjoin_11.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index b897ffa..d408321 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -566,7 +566,7 @@ public class QTestUtil {
       System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
     }
 
-    queryState = new QueryState(new HiveConf(Driver.class));
+    queryState = new QueryState.Builder().withHiveConf(new HiveConf(Driver.class)).build();
     if (useHBaseMetastore) {
       startMiniHBaseCluster();
     } else {
@@ -1896,7 +1896,7 @@ public class QTestUtil {
   public void resetParser() throws SemanticException {
     drv.init();
     pd = new ParseDriver();
-    queryState = new QueryState(conf);
+    queryState = new QueryState.Builder().withHiveConf(conf).build();
     sem = new SemanticAnalyzer(queryState);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
index 3d9ca99..af464b9 100644
--- a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
+++ b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java
@@ -63,6 +63,7 @@ public final class QFile {
   private static final String MASK_PATTERN = "#### A masked pattern was here ####\n";
 
   private String name;
+  private String databaseName;
   private File inputFile;
   private File rawOutputFile;
   private File outputFile;
@@ -81,6 +82,10 @@ public final class QFile {
     return name;
   }
 
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
   public File getInputFile() {
     return inputFile;
   }
@@ -163,8 +168,8 @@ public final class QFile {
    */
   private String revertReplaceTableNames(String source) {
     for (String table : srcTables) {
-      source = source.replaceAll("(?is)(\\s+)default\\." + table + "([\\s;\\n\\)])", "$1" + table
-          + "$2");
+      source = source.replaceAll("(?is)(?<!name:?|alias:?)(\\s+)default\\." + table
+          + "([\\s;\\n\\)])", "$1" + table + "$2");
     }
     return source;
   }
@@ -319,6 +324,7 @@ public final class QFile {
     public QFile getQFile(String name) throws IOException {
       QFile result = new QFile();
       result.name = name;
+      result.databaseName = "test_db_" + name;
       result.inputFile = new File(queryDirectory, name + ".q");
       result.rawOutputFile = new File(logDirectory, name + ".q.out.raw");
       result.outputFile = new File(logDirectory, name + ".q.out");
@@ -328,11 +334,13 @@ public final class QFile {
       result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log");
       result.rewriteSourceTables = rewriteSourceTables;
       result.specificFilterSet = new RegexFilterSet()
-          .addFilter("(PREHOOK|POSTHOOK): (Output|Input): database:" + name + "\n",
+          .addFilter("(PREHOOK|POSTHOOK): (Output|Input): database:" + result.databaseName + "\n",
               "$1: $2: database:default\n")
-          .addFilter("(PREHOOK|POSTHOOK): (Output|Input): " + name + "@", "$1: $2: default@")
-          .addFilter("name(:?) " + name + "\\.(.*)\n", "name$1 default.$2\n")
-          .addFilter("/" + name + ".db/", "/");
+          .addFilter("(PREHOOK|POSTHOOK): (Output|Input): " + result.databaseName + "@",
+              "$1: $2: default@")
+          .addFilter("name(:?) " + result.databaseName + "\\.(.*)\n", "name$1 default.$2\n")
+          .addFilter("alias(:?) " + result.databaseName + "\\.(.*)\n", "alias$1 default.$2\n")
+          .addFilter("/" + result.databaseName + ".db/", "/");
       result.converter = Converter.NONE;
       String input = FileUtils.readFileToString(result.inputFile, "UTF-8");
       if (input.contains("-- SORT_QUERY_RESULTS")) {
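
The tightened pattern in revertReplaceTableNames above adds a negative lookbehind so that
default.<table> is left as-is when it follows a name: or alias: label, while other
occurrences are still reverted to the bare table name. A small stand-alone illustration with
made-up input strings (only the replaceAll call is taken verbatim from the hunk above):

    import java.util.Arrays;
    import java.util.List;

    public class RevertReplaceSketch {
      public static void main(String[] args) {
        List<String> srcTables = Arrays.asList("src");
        String source = "          alias: default.src\n"
            + "          FROM default.src ;\n";
        for (String table : srcTables) {
          // Negative lookbehind: skip occurrences that follow "name:"/"alias:" labels.
          source = source.replaceAll("(?is)(?<!name:?|alias:?)(\\s+)default\\." + table
              + "([\\s;\\n\\)])", "$1" + table + "$2");
        }
        // Prints the alias line unchanged and "FROM src ;" with the prefix reverted.
        System.out.print(source);
      }
    }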

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
index 7c50e18..d67bf19 100644
--- a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
+++ b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java
@@ -77,9 +77,9 @@ public class QFileBeeLineClient implements AutoCloseable {
           "!set showheader false",
           "USE default;",
           "SHOW TABLES;",
-          "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;",
-          "CREATE DATABASE `" + qFile.getName() + "`;",
-          "USE `" + qFile.getName() + "`;",
+          "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
+          "CREATE DATABASE `" + qFile.getDatabaseName() + "`;",
+          "USE `" + qFile.getDatabaseName() + "`;",
           "set hive.in.test.short.logs=true;",
           "set hive.in.test.remove.logs=false;",
         },
@@ -98,7 +98,7 @@ public class QFileBeeLineClient implements AutoCloseable {
           "!set showheader true",
           "!set outputformat table",
           "USE default;",
-          "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;",
+          "DROP DATABASE IF EXISTS `" + qFile.getDatabaseName() + "` CASCADE;",
         },
         qFile.getAfterExecuteLogFile(),
         Converter.NONE);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index f01efa5..9aced9f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -347,21 +347,21 @@ public class Driver implements CommandProcessor {
   }
 
   public Driver() {
-    this(new QueryState((SessionState.get() != null) ?
+    this(getNewQueryState((SessionState.get() != null) ?
         SessionState.get().getConf() : new HiveConf()), null);
   }
 
   public Driver(HiveConf conf) {
-    this(new QueryState(conf), null);
+    this(getNewQueryState(conf), null);
   }
 
   public Driver(HiveConf conf, Context ctx) {
-    this(new QueryState(conf), null);
+    this(getNewQueryState(conf), null);
     this.ctx = ctx;
   }
 
   public Driver(HiveConf conf, String userName) {
-    this(new QueryState(conf), userName, null);
+    this(getNewQueryState(conf), userName, null);
   }
 
   public Driver(QueryState queryState, String userName) {
@@ -369,7 +369,7 @@ public class Driver implements CommandProcessor {
   }
 
   public Driver(HiveConf conf, HooksLoader hooksLoader) {
-    this(new QueryState(conf), null, hooksLoader, null);
+    this(getNewQueryState(conf), null, hooksLoader, null);
   }
 
   public Driver(QueryState queryState, String userName, QueryInfo queryInfo) {
@@ -388,6 +388,15 @@ public class Driver implements CommandProcessor {
   }
 
   /**
+   * Generates a new QueryState object, making sure that a new queryId is generated.
+   * @param conf The HiveConf which should be used
+   * @return The new QueryState object
+   */
+  private static QueryState getNewQueryState(HiveConf conf) {
+    return new QueryState.Builder().withGenerateNewQueryId(true).withHiveConf(conf).build();
+  }
+
+  /**
    * Compile a new query. Any currently-planned query associated with this Driver is discarded.
    * Do not reset id for inner queries(index, etc). Task ids are used for task identity check.
    *
@@ -454,7 +463,7 @@ public class Driver implements CommandProcessor {
 
     LockedDriverState.setLockedDriverState(lDrvState);
 
-    String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
+    String queryId = queryState.getQueryId();
 
     //save some info for webUI for use after plan is freed
     this.queryDisplay.setQueryStr(queryStr);
@@ -1693,7 +1702,7 @@ public class Driver implements CommandProcessor {
     int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
     Metrics metrics = MetricsFactory.getInstance();
 
-    String queryId = conf.getVar(HiveConf.ConfVars.HIVEQUERYID);
+    String queryId = queryState.getQueryId();
     // Get the query string from the conf file as the compileInternal() method might
     // hide sensitive information during query redaction.
     String queryStr = conf.getQueryString();
@@ -1732,8 +1741,7 @@ public class Driver implements CommandProcessor {
       plan.setStarted();
 
       if (SessionState.get() != null) {
-        SessionState.get().getHiveHistory().startQuery(queryStr,
-            conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+        SessionState.get().getHiveHistory().startQuery(queryStr, queryId);
         SessionState.get().getHiveHistory().logPlanProgress(plan);
       }
       resStream = null;
@@ -2425,6 +2433,6 @@ public class Driver implements CommandProcessor {
     // repeated compile/execute calls create new contexts, plan, etc., so we don't need to worry
     // propagating queryState into those existing fields, or resetting them.
     releaseResources();
-    this.queryState = new QueryState(queryState.getConf());
+    this.queryState = getNewQueryState(queryState.getConf());
   }
 }
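
[Editorial note] The net effect of the Driver change above is that every constructor now funnels through getNewQueryState(), so a fresh queryId is minted per Driver instance instead of being read back from HiveConf.ConfVars.HIVEQUERYID. A minimal sketch of the resulting call pattern, using only the QueryState.Builder API introduced in this patch (the someHiveConf variable is illustrative, not part of the patch):

    // Illustrative only: mirrors what Driver#getNewQueryState does after this change.
    HiveConf someHiveConf = new HiveConf();           // hypothetical caller-supplied conf
    QueryState qs = new QueryState.Builder()
        .withGenerateNewQueryId(true)                 // mint a fresh queryId
        .withHiveConf(someHiveConf)                   // back the state with this conf
        .build();
    String queryId = qs.getQueryId();                 // replaces conf.getVar(HIVEQUERYID)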

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
index 6dfaa9f..fa7c323 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryState.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.ql.plan.HiveOperation;
  * The class to store query level info such as queryId. Multiple queries can run
  * in the same session, so SessionState is to hold common session related info, and
  * each QueryState is to hold query related info.
- *
  */
 public class QueryState {
   /**
@@ -39,48 +38,12 @@ public class QueryState {
    */
   private HiveOperation commandType;
 
-  public QueryState(HiveConf conf) {
-    this(conf, null, false);
-  }
-
-  public QueryState(HiveConf conf, Map<String, String> confOverlay, boolean runAsync) {
-    this.queryConf = createConf(conf, confOverlay, runAsync);
-  }
-
   /**
-   * If there are query specific settings to overlay, then create a copy of config
-   * There are two cases we need to clone the session config that's being passed to hive driver
-   * 1. Async query -
-   *    If the client changes a config setting, that shouldn't reflect in the execution already underway
-   * 2. confOverlay -
-   *    The query specific settings should only be applied to the query config and not session
-   * @return new configuration
+   * Private constructor, use QueryState.Builder instead
+   * @param conf The query specific configuration object
    */
-  private HiveConf createConf(HiveConf conf,
-      Map<String, String> confOverlay,
-      boolean runAsync) {
-
-    if ( (confOverlay != null && !confOverlay.isEmpty()) ) {
-      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
-
-      // apply overlay query specific settings, if any
-      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
-        try {
-          conf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
-        } catch (IllegalArgumentException e) {
-          throw new RuntimeException("Error applying statement specific settings", e);
-        }
-      }
-    } else if (runAsync) {
-      conf = (conf == null ? new HiveConf() : new HiveConf(conf));
-    }
-
-    if (conf == null) {
-      conf = new HiveConf();
-    }
-
-    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
-    return conf;
+  private QueryState(HiveConf conf) {
+    this.queryConf = conf;
   }
 
   public String getQueryId() {
@@ -109,4 +72,105 @@ public class QueryState {
   public HiveConf getConf() {
     return queryConf;
   }
+
+  /**
+   * Builder to instantiate the QueryState object.
+   */
+  public static class Builder {
+    private Map<String, String> confOverlay = null;
+    private boolean runAsync = false;
+    private boolean generateNewQueryId = false;
+    private HiveConf hiveConf = null;
+
+    /**
+     * Default constructor - use this builder to create a QueryState object
+     */
+    public Builder() {
+    }
+
+    /**
+     * Set this to true if the configuration should be detached from the original config. If not
+     * set, the default value is false.
+     * @param runAsync If the configuration should be detached
+     * @return The builder
+     */
+    public Builder withRunAsync(boolean runAsync) {
+      this.runAsync = runAsync;
+      return this;
+    }
+
+    /**
+     * Set this if there are specific configuration values which should be added to the original
+     * config. If at least one value is set, then the configuration will be detached from the
+     * original one.
+     * @param confOverlay The query specific parameters
+     * @return The builder
+     */
+    public Builder withConfOverlay(Map<String, String> confOverlay) {
+      this.confOverlay = confOverlay;
+      return this;
+    }
+
+    /**
+     * Set this to true if a new queryId should be generated; otherwise the original one will be kept.
+     * If not set, the default value is false.
+     * @param generateNewQueryId If new queryId should be generated
+     * @return The builder
+     */
+    public Builder withGenerateNewQueryId(boolean generateNewQueryId) {
+      this.generateNewQueryId = generateNewQueryId;
+      return this;
+    }
+
+    /**
+     * The source HiveConf object used to create the QueryState. If runAsync is false and the
+     * confOverlay is empty, then we will reuse the hiveConf object as a backing datastore for the
+     * QueryState. Otherwise we will create a clone of the hiveConf object.
+     * @param hiveConf The source HiveConf
+     * @return The builder
+     */
+    public Builder withHiveConf(HiveConf hiveConf) {
+      this.hiveConf = hiveConf;
+      return this;
+    }
+
+    /**
+     * Creates the QueryState object. The default values are:
+     * - runAsync false
+     * - confOverlay null
+     * - generateNewQueryId false
+     * - hiveConf null
+     * @return The generated QueryState object
+     */
+    public QueryState build() {
+      HiveConf queryConf = hiveConf;
+
+      if (queryConf == null) {
+        // Generate a new conf if necessary
+        queryConf = new HiveConf();
+      } else if (runAsync || (confOverlay != null && !confOverlay.isEmpty())) {
+        // Detach the original conf if necessary
+        queryConf = new HiveConf(queryConf);
+      }
+
+      // Set the specific parameters if needed
+      if (confOverlay != null && !confOverlay.isEmpty()) {
+        // apply overlay query specific settings, if any
+        for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+          try {
+            queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue());
+          } catch (IllegalArgumentException e) {
+            throw new RuntimeException("Error applying statement specific settings", e);
+          }
+        }
+      }
+
+      // Generate the new queryId if needed
+      if (generateNewQueryId) {
+        queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, QueryPlan.makeQueryId());
+      }
+
+      return new QueryState(queryConf);
+    }
+  }
 }
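
[Editorial note] The Builder's build() method above detaches the configuration (clones the HiveConf) whenever runAsync is true or a non-empty confOverlay is supplied, applies the overlay through verifyAndSet, and only generates a new queryId when explicitly asked. A hedged sketch of the overlay/async path, roughly the pattern Operation.java adopts later in this patch; sessionConf and the overlay key/value are illustrative assumptions, not part of the diff:

    // Illustrative sketch; only the QueryState.Builder API comes from this patch.
    Map<String, String> overlay = new HashMap<>();
    overlay.put("hive.exec.parallel", "true");        // example query-specific setting

    QueryState qs = new QueryState.Builder()
        .withHiveConf(sessionConf)                    // hypothetical session-level HiveConf
        .withConfOverlay(overlay)                     // non-empty overlay => conf is cloned
        .withRunAsync(true)                           // async execution also forces a clone
        .withGenerateNewQueryId(true)                 // sets HIVEQUERYID on the cloned conf
        .build();

    HiveConf queryConf = qs.getConf();                // detached copy; sessionConf is untouched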

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index cf575de..b07d6b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -740,7 +740,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     DriverContext driverCxt = new DriverContext();
     Task task;
     if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
-      TezWork tezWork = new TezWork(conf.getVar(HiveConf.ConfVars.HIVEQUERYID), conf);
+      TezWork tezWork = new TezWork(queryState.getQueryId(), conf);
       mergeWork.setName("File Merge");
       tezWork.add(mergeWork);
       task = new TezTask();

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 77bce97..ad921f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -344,7 +344,8 @@ public class PartialScanTask extends Task<PartialScanWork> implements
       }
     }
 
-    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
+    QueryState queryState =
+        new QueryState.Builder().withHiveConf(new HiveConf(conf, PartialScanTask.class)).build();
     PartialScanWork mergeWork = new PartialScanWork(inputPaths);
     DriverContext driverCxt = new DriverContext();
     PartialScanTask taskExec = new PartialScanTask();

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
index b121eea..56c0163 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java
@@ -60,7 +60,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;
 import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -70,7 +69,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.joda.time.Interval;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -330,7 +328,8 @@ public final class HiveMaterializedViewsRegistry {
   private static RelNode parseQuery(String viewQuery) {
     try {
       final ASTNode node = ParseUtils.parse(viewQuery);
-      final QueryState qs = new QueryState(SessionState.get().getConf());
+      final QueryState qs =
+          new QueryState.Builder().withHiveConf(SessionState.get().getConf()).build();
       CalcitePlanner analyzer = new CalcitePlanner(qs);
       analyzer.initCtx(new Context(SessionState.get().getConf()));
       analyzer.init(false);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index 3b719af..d72ff5cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -33,18 +32,15 @@ import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -135,12 +131,13 @@ public class ColumnStatsAutoGatherContext {
     ASTNode tree = ParseUtils.parse(analyzeCommand, ctx);
 
     //1. get the ColumnStatsSemanticAnalyzer
-    BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), tree);
+    QueryState queryState = new QueryState.Builder().withHiveConf(conf).build();
+    BaseSemanticAnalyzer baseSem = SemanticAnalyzerFactory.get(queryState, tree);
     ColumnStatsSemanticAnalyzer colSem = (ColumnStatsSemanticAnalyzer) baseSem;
 
     //2. get the rewritten AST
     ASTNode ast = colSem.rewriteAST(tree, this);
-    baseSem = SemanticAnalyzerFactory.get(new QueryState(conf), ast);
+    baseSem = SemanticAnalyzerFactory.get(queryState, ast);
     SemanticAnalyzer sem = (SemanticAnalyzer) baseSem;
     QB qb = new QB(null, null, false);
     ASTNode child = ast;

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index c7266bc..b4898e2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -62,7 +62,6 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.util.Shell;
 
 /**
  * Mimics the actual query compiler in generating end to end plans and testing
@@ -83,7 +82,7 @@ public class TestExecDriver extends TestCase {
 
   static {
     try {
-      queryState = new QueryState(new HiveConf(ExecDriver.class));
+      queryState = new QueryState.Builder().withHiveConf(new HiveConf(ExecDriver.class)).build();
       conf = queryState.getConf();
       conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
       conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, true);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index c734988..deba1d5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -42,7 +42,7 @@ public class TestMacroSemanticAnalyzer {
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     conf = queryState.getConf();
     SessionState.start(conf);
     context = new Context(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index 201622e..182ac2b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -50,7 +50,7 @@ public class TestQBCompact {
 
   @BeforeClass
   public static void init() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     conf = queryState.getConf();
     conf
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
index e607f10..45901c9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
@@ -38,7 +38,8 @@ public class TestQBJoinTreeApplyPredicate {
 
   @BeforeClass
   public static void initialize() {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState =
+        new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
     conf = queryState.getConf();
     SessionState.start(conf);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 2674835..993b4da 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -48,7 +48,8 @@ public class TestQBSubQuery {
 
   @BeforeClass
   public static void initialize() {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState =
+        new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
     conf = queryState.getConf();
     SessionState.start(conf);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index 80865bd..1cb4470 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -56,7 +56,8 @@ public class TestReplicationSemanticAnalyzer {
 
   @BeforeClass
   public static void initialize() throws HiveException {
-    queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
+    queryState =
+        new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
     conf = queryState.getConf();
     conf.set("hive.security.authorization.manager", "");
     SessionState.start(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
index 5849950..b19d42f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
@@ -32,7 +32,7 @@ public class TestSemanticAnalyzerFactory {
   
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     conf = queryState.getConf();
   }
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index a573808..9c20521 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -223,7 +223,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
 
   @Before
   public void setup() {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     conf = queryState.getConf();
     conf
     .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
index 58cb4b4..746aa4b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
@@ -99,7 +99,7 @@ public class TestHiveAuthorizationTaskFactory {
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     HiveConf conf = queryState.getConf();
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
         TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index 5d01080..349f494 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.parse.authorization;
 
 import java.util.HashMap;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -41,7 +39,7 @@ public class TestPrivilegesV1 extends PrivilegesTestBase{
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     db = Mockito.mock(Hive.class);
     table = new Table(DB, TABLE);
     partition = new Partition(table);

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
index c552ba7..312770f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV2.java
@@ -40,7 +40,7 @@ public class TestPrivilegesV2 extends PrivilegesTestBase{
 
   @Before
   public void setup() throws Exception {
-    queryState = new QueryState(null);
+    queryState = new QueryState.Builder().build();
     //set authorization mode to V2
     HiveConf conf = queryState.getConf();
     conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
new file mode 100644
index 0000000..041621f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
@@ -0,0 +1,322 @@
+PREHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: create table cmv_basetable (a int, b varchar(256), c decimal(10,2), d int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_basetable
+PREHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@cmv_basetable
+POSTHOOK: query: insert into cmv_basetable values
+ (1, 'alfred', 10.30, 2),
+ (2, 'bob', 3.14, 3),
+ (2, 'bonnie', 172342.2, 3),
+ (3, 'calvin', 978.76, 3),
+ (3, 'charlie', 9.8, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@cmv_basetable
+POSTHOOK: Lineage: cmv_basetable.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.b EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.c EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: cmv_basetable.d EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
+PREHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view
+POSTHOOK: query: create materialized view cmv_mat_view enable rewrite
+as select a, b, c from cmv_basetable where a = 2
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view
+PREHOOK: query: select * from cmv_mat_view
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view
+#### A masked pattern was here ####
+2	bob	3.14
+2	bonnie	172342.20
+PREHOOK: query: show tblproperties cmv_mat_view
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles	1
+totalSize	453
+#### A masked pattern was here ####
+PREHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: create materialized view if not exists cmv_mat_view2 enable rewrite
+as select a, c from cmv_basetable where a = 3
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@cmv_mat_view2
+PREHOOK: query: select * from cmv_mat_view2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from cmv_mat_view2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3	978.76
+3	9.80
+PREHOOK: query: show tblproperties cmv_mat_view2
+PREHOOK: type: SHOW_TBLPROPERTIES
+POSTHOOK: query: show tblproperties cmv_mat_view2
+POSTHOOK: type: SHOW_TBLPROPERTIES
+numFiles	1
+totalSize	322
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: default.cmv_mat_view2
+          Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: a (type: int), c (type: decimal(10,2))
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+            ListSink
+
+PREHOOK: query: select a, c from cmv_basetable where a = 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select a, c from cmv_basetable where a = 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3	978.76
+3	9.80
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.cmv_mat_view2
+            Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: c (type: decimal(10,2))
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 2 Data size: 322 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: decimal(10,2))
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((d = 3) and (3 = a)) (type: boolean)
+              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
+#### A masked pattern was here ####
+3	9.80	3	978.76
+3	978.76	3	978.76
+PREHOOK: query: drop materialized view cmv_mat_view2
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@cmv_mat_view2
+PREHOOK: Output: default@cmv_mat_view2
+POSTHOOK: query: drop materialized view cmv_mat_view2
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@cmv_mat_view2
+POSTHOOK: Output: default@cmv_mat_view2
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (a = 3) (type: boolean)
+              Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+          TableScan
+            alias: cmv_basetable
+            Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((d = 3) and (3 = a)) (type: boolean)
+              Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: c (type: decimal(10,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: decimal(10,2))
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: 3 (type: int), _col0 (type: decimal(10,2)), 3 (type: int), _col1 (type: decimal(10,2))
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 2 Data size: 66 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (
+  (select a, c from cmv_basetable where a = 3) table1
+  join
+  (select a, c from cmv_basetable where d = 3) table2
+  on table1.a = table2.a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@cmv_basetable
+#### A masked pattern was here ####
+3	9.80	3	978.76
+3	978.76	3	978.76

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/service/src/java/org/apache/hive/service/cli/operation/Operation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
index 0b27608..4e78551 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java
@@ -20,7 +20,6 @@ package org.apache.hive.service.cli.operation;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -63,7 +62,6 @@ public abstract class Operation {
   protected volatile Future<?> backgroundHandle;
   protected OperationLog operationLog;
   protected boolean isOperationLogEnabled;
-  protected Map<String, String> confOverlay = new HashMap<String, String>();
 
   private long operationTimeout;
   private volatile long lastAccessTime;
@@ -90,9 +88,6 @@ public abstract class Operation {
   protected Operation(HiveSession parentSession,
       Map<String, String> confOverlay, OperationType opType, boolean isAsyncQueryState) {
     this.parentSession = parentSession;
-    if (confOverlay != null) {
-      this.confOverlay = confOverlay;
-    }
     this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
     beginTime = System.currentTimeMillis();
     lastAccessTime = beginTime;
@@ -101,7 +96,12 @@ public abstract class Operation {
 
     currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX,
         MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
-    queryState = new QueryState(parentSession.getHiveConf(), confOverlay, isAsyncQueryState);
+    queryState = new QueryState.Builder()
+                     .withConfOverlay(confOverlay)
+                     .withRunAsync(isAsyncQueryState)
+                     .withGenerateNewQueryId(true)
+                     .withHiveConf(parentSession.getHiveConf())
+                     .build();
   }
 
   public Future<?> getBackgroundHandle() {

http://git-wip-us.apache.org/repos/asf/hive/blob/86f74fdd/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 0b51591..1a2be8b 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -169,7 +169,7 @@ public class SQLOperation extends ExecuteStatementOperation {
           @Override
           public void run() {
             try {
-              String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+              String queryId = queryState.getQueryId();
               LOG.info("Query timed out after: " + queryTimeout
                   + " seconds. Cancelling the execution now: " + queryId);
               SQLOperation.this.cancel(OperationState.TIMEDOUT);
@@ -397,7 +397,7 @@ public class SQLOperation extends ExecuteStatementOperation {
       Future<?> backgroundHandle = getBackgroundHandle();
       if (backgroundHandle != null) {
         boolean success = backgroundHandle.cancel(true);
-        String queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+        String queryId = queryState.getQueryId();
         if (success) {
           LOG.info("The running operation has been successfully interrupted: " + queryId);
         } else if (state == OperationState.CANCELED) {
@@ -430,7 +430,7 @@ public class SQLOperation extends ExecuteStatementOperation {
   public void cancel(OperationState stateAfterCancel) throws HiveSQLException {
     String queryId = null;
     if (stateAfterCancel == OperationState.CANCELED) {
-      queryId = confOverlay.get(HiveConf.ConfVars.HIVEQUERYID.varname);
+      queryId = queryState.getQueryId();
       LOG.info("Cancelling the query execution: " + queryId);
     }
     cleanup(stateAfterCancel);


[11/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query65.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query65.q.out b/ql/src/test/results/clientpositive/perf/query65.q.out
index db671aa..17d80d0 100644
--- a/ql/src/test/results/clientpositive/perf/query65.q.out
+++ b/ql/src/test/results/clientpositive/perf/query65.q.out
@@ -77,13 +77,13 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 12 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 13 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 4 <- Map 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 5 <- Map 11 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 9 <- Map 11 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 7 <- Map 1 (SIMPLE_EDGE), Map 9 (SIMPLE_EDGE)
+Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -101,7 +101,7 @@ Stage-0
                 Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                 Merge Join Operator [MERGEJOIN_81] (rows=255550079 width=88)
                   Conds:RS_44._col1=RS_45._col0(Inner),Output:["_col2","_col6","_col8","_col9","_col10","_col11"]
-                <-Map 13 [SIMPLE_EDGE]
+                <-Map 11 [SIMPLE_EDGE]
                   SHUFFLE [RS_45]
                     PartitionCols:_col0
                     Select Operator [SEL_38] (rows=462000 width=1436)
@@ -117,7 +117,7 @@ Stage-0
                       predicate:(_col2 <= (0.1 * _col4))
                       Merge Join Operator [MERGEJOIN_80] (rows=696954748 width=88)
                         Conds:RS_39._col0=RS_40._col0(Inner),RS_39._col0=RS_41._col0(Inner),Output:["_col1","_col2","_col4","_col6"]
-                      <-Map 12 [SIMPLE_EDGE]
+                      <-Map 10 [SIMPLE_EDGE]
                         SHUFFLE [RS_41]
                           PartitionCols:_col0
                           Select Operator [SEL_35] (rows=1704 width=1910)
@@ -126,42 +126,6 @@ Stage-0
                               predicate:s_store_sk is not null
                               TableScan [TS_33] (rows=1704 width=1910)
                                 default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_store_name"]
-                      <-Reducer 10 [SIMPLE_EDGE]
-                        SHUFFLE [RS_40]
-                          PartitionCols:_col0
-                          Select Operator [SEL_32] (rows=158398803 width=88)
-                            Output:["_col0","_col1"]
-                            Group By Operator [GBY_31] (rows=158398803 width=88)
-                              Output:["_col0","_col1"],aggregations:["avg(_col2)"],keys:_col1
-                              Select Operator [SEL_27] (rows=316797606 width=88)
-                                Output:["_col1","_col2"]
-                                Group By Operator [GBY_26] (rows=316797606 width=88)
-                                  Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                                <-Reducer 9 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_25]
-                                    PartitionCols:_col0
-                                    Group By Operator [GBY_24] (rows=633595212 width=88)
-                                      Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col2, _col1
-                                      Merge Join Operator [MERGEJOIN_79] (rows=633595212 width=88)
-                                        Conds:RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 11 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_21]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_19] (rows=8116 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_75] (rows=8116 width=1119)
-                                              predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                              TableScan [TS_17] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
-                                      <-Map 8 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_20]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_16] (rows=575995635 width=88)
-                                            Output:["_col0","_col1","_col2","_col3"]
-                                            Filter Operator [FIL_74] (rows=575995635 width=88)
-                                              predicate:(ss_sold_date_sk is not null and ss_store_sk is not null)
-                                              TableScan [TS_14] (rows=575995635 width=88)
-                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_store_sk","ss_sales_price"]
                       <-Reducer 3 [SIMPLE_EDGE]
                         SHUFFLE [RS_39]
                           PartitionCols:_col0
@@ -183,7 +147,7 @@ Stage-0
                                         predicate:(ss_sold_date_sk is not null and ss_store_sk is not null and ss_item_sk is not null)
                                         TableScan [TS_0] (rows=575995635 width=88)
                                           default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_store_sk","ss_sales_price"]
-                                <-Map 7 [SIMPLE_EDGE]
+                                <-Map 9 [SIMPLE_EDGE]
                                   SHUFFLE [RS_7]
                                     PartitionCols:_col0
                                     Select Operator [SEL_5] (rows=8116 width=1119)
@@ -192,4 +156,38 @@ Stage-0
                                         predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
                                         TableScan [TS_3] (rows=73049 width=1119)
                                           default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
+                      <-Reducer 8 [SIMPLE_EDGE]
+                        SHUFFLE [RS_40]
+                          PartitionCols:_col0
+                          Select Operator [SEL_32] (rows=158398803 width=88)
+                            Output:["_col0","_col1"]
+                            Group By Operator [GBY_31] (rows=158398803 width=88)
+                              Output:["_col0","_col1"],aggregations:["avg(_col2)"],keys:_col1
+                              Select Operator [SEL_27] (rows=316797606 width=88)
+                                Output:["_col1","_col2"]
+                                Group By Operator [GBY_26] (rows=316797606 width=88)
+                                  Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
+                                <-Reducer 7 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_25]
+                                    PartitionCols:_col0
+                                    Group By Operator [GBY_24] (rows=633595212 width=88)
+                                      Output:["_col0","_col1","_col2"],aggregations:["sum(_col3)"],keys:_col2, _col1
+                                      Merge Join Operator [MERGEJOIN_79] (rows=633595212 width=88)
+                                        Conds:RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col2","_col3"]
+                                      <-Map 1 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_20]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_16] (rows=575995635 width=88)
+                                            Output:["_col0","_col1","_col2","_col3"]
+                                            Filter Operator [FIL_74] (rows=575995635 width=88)
+                                              predicate:(ss_sold_date_sk is not null and ss_store_sk is not null)
+                                               Please refer to the previous TableScan [TS_0]
+                                      <-Map 9 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_21]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_19] (rows=8116 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_75] (rows=8116 width=1119)
+                                              predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_3]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query66.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query66.q.out b/ql/src/test/results/clientpositive/perf/query66.q.out
index 072bfee..ec7b6af 100644
--- a/ql/src/test/results/clientpositive/perf/query66.q.out
+++ b/ql/src/test/results/clientpositive/perf/query66.q.out
@@ -439,15 +439,15 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
-Reducer 16 <- Map 21 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
-Reducer 17 <- Map 22 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 18 <- Map 23 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
-Reducer 19 <- Reducer 18 (SIMPLE_EDGE), Union 7 (CONTAINS)
+Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 19 (SIMPLE_EDGE)
+Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 13 <- Map 17 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 14 <- Map 18 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
+Reducer 15 <- Reducer 14 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 10 (SIMPLE_EDGE)
-Reducer 3 <- Map 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 12 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Map 13 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 3 <- Map 16 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 17 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 5 <- Map 18 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE), Union 7 (CONTAINS)
 Reducer 8 <- Union 7 (SIMPLE_EDGE)
 Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
@@ -469,7 +469,7 @@ Stage-0
                 Group By Operator [GBY_71] (rows=158120068 width=135)
                   Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29","_col30","_col31","_col32","_col33","_col34","_col35","_col36","_col37","_col38","_col39","_col40","_col41"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)","sum(VALUE._col4)","sum(VALUE._col5)","sum(VALUE._col6)","sum(VALUE._col7)","sum(VALUE._col8)","sum(VALUE._col9)","sum(VALUE._col10)","sum(VALUE._col11)","sum(VALUE._col12)","sum(VALUE._col13)","sum(VALUE._col14)","sum(VALUE._col15)","sum(VALUE._col16)","sum(VALUE._col17)","sum(VALUE._col18)","sum(VALUE._col19)","sum(VALUE._col20)","sum(VALUE._col21)","sum(VALUE._col22)","sum(VALUE._col23)","sum(VALUE._col24)","sum(VALUE._col25)","sum(VALUE._col26)","sum(VALUE._col27)","sum(VALUE._col28)","sum(VALUE._col29)
 ","sum(VALUE._col30)","sum(VALUE._col31)","sum(VALUE._col32)","sum(VALUE._col33)","sum(VALUE._col34)","sum(VALUE._col35)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4, KEY._col5
                 <-Union 7 [SIMPLE_EDGE]
-                  <-Reducer 19 [CONTAINS]
+                  <-Reducer 15 [CONTAINS]
                     Reduce Output Operator [RS_70]
                       PartitionCols:_col0, _col1, _col2, _col3, _col4, _col5
                       Group By Operator [GBY_69] (rows=316240137 width=135)
@@ -478,7 +478,7 @@ Stage-0
                           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29","_col30","_col31","_col32","_col33","_col34","_col35","_col36","_col37","_col38","_col39","_col40","_col41"]
                           Group By Operator [GBY_64] (rows=210822976 width=135)
                             Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)","sum(VALUE._col3)","sum(VALUE._col4)","sum(VALUE._col5)","sum(VALUE._col6)","sum(VALUE._col7)","sum(VALUE._col8)","sum(VALUE._col9)","sum(VALUE._col10)","sum(VALUE._col11)","sum(VALUE._col12)","sum(VALUE._col13)","sum(VALUE._col14)","sum(VALUE._col15)","sum(VALUE._col16)","sum(VALUE._col17)","sum(VALUE._col18)","sum(VALUE._col19)","sum(VALUE._col20)","sum(VALUE._col21)","sum(VALUE._col22)","sum(VALUE._col23)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3, KEY._col4, KEY._col5
-                          <-Reducer 18 [SIMPLE_EDGE]
+                          <-Reducer 14 [SIMPLE_EDGE]
                             SHUFFLE [RS_63]
                               PartitionCols:_col0, _col1, _col2, _col3, _col4, _col5
                               Group By Operator [GBY_62] (rows=421645953 width=135)
@@ -487,49 +487,58 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29"]
                                   Merge Join Operator [MERGEJOIN_122] (rows=421645953 width=135)
                                     Conds:RS_57._col3=RS_58._col0(Inner),Output:["_col4","_col5","_col6","_col11","_col15","_col16","_col17","_col18","_col19","_col20"]
-                                  <-Map 23 [SIMPLE_EDGE]
+                                  <-Map 18 [SIMPLE_EDGE]
                                     SHUFFLE [RS_58]
                                       PartitionCols:_col0
                                       Select Operator [SEL_47] (rows=27 width=1029)
                                         Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
                                         Filter Operator [FIL_114] (rows=27 width=1029)
                                           predicate:w_warehouse_sk is not null
-                                          TableScan [TS_45] (rows=27 width=1029)
+                                          TableScan [TS_12] (rows=27 width=1029)
                                             default@warehouse,warehouse,Tbl:COMPLETE,Col:NONE,Output:["w_warehouse_sk","w_warehouse_name","w_warehouse_sq_ft","w_city","w_county","w_state","w_country"]
-                                  <-Reducer 17 [SIMPLE_EDGE]
+                                  <-Reducer 13 [SIMPLE_EDGE]
                                     SHUFFLE [RS_57]
                                       PartitionCols:_col3
                                       Merge Join Operator [MERGEJOIN_121] (rows=383314495 width=135)
                                         Conds:RS_54._col2=RS_55._col0(Inner),Output:["_col3","_col4","_col5","_col6","_col11"]
-                                      <-Map 22 [SIMPLE_EDGE]
+                                      <-Map 17 [SIMPLE_EDGE]
                                         SHUFFLE [RS_55]
                                           PartitionCols:_col0
                                           Select Operator [SEL_44] (rows=1 width=0)
                                             Output:["_col0"]
                                             Filter Operator [FIL_113] (rows=1 width=0)
                                               predicate:((sm_carrier) IN ('DIAMOND', 'AIRBORNE') and sm_ship_mode_sk is not null)
-                                              TableScan [TS_42] (rows=1 width=0)
+                                              TableScan [TS_9] (rows=1 width=0)
                                                 default@ship_mode,ship_mode,Tbl:PARTIAL,Col:NONE,Output:["sm_ship_mode_sk","sm_carrier"]
-                                      <-Reducer 16 [SIMPLE_EDGE]
+                                      <-Reducer 12 [SIMPLE_EDGE]
                                         SHUFFLE [RS_54]
                                           PartitionCols:_col2
                                           Merge Join Operator [MERGEJOIN_120] (rows=348467716 width=135)
                                             Conds:RS_51._col0=RS_52._col0(Inner),Output:["_col2","_col3","_col4","_col5","_col6","_col11"]
-                                          <-Map 21 [SIMPLE_EDGE]
+                                          <-Map 16 [SIMPLE_EDGE]
                                             SHUFFLE [RS_52]
                                               PartitionCols:_col0
                                               Select Operator [SEL_41] (rows=36524 width=1119)
                                                 Output:["_col0","_col2"]
                                                 Filter Operator [FIL_112] (rows=36524 width=1119)
                                                   predicate:((d_year = 2002) and d_date_sk is not null)
-                                                  TableScan [TS_39] (rows=73049 width=1119)
+                                                  TableScan [TS_6] (rows=73049 width=1119)
                                                     default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                                          <-Reducer 15 [SIMPLE_EDGE]
+                                          <-Reducer 11 [SIMPLE_EDGE]
                                             SHUFFLE [RS_51]
                                               PartitionCols:_col0
                                               Merge Join Operator [MERGEJOIN_119] (rows=316788826 width=135)
                                                 Conds:RS_48._col1=RS_49._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5","_col6"]
-                                              <-Map 14 [SIMPLE_EDGE]
+                                              <-Map 10 [SIMPLE_EDGE]
+                                                SHUFFLE [RS_49]
+                                                  PartitionCols:_col0
+                                                  Select Operator [SEL_38] (rows=9600 width=471)
+                                                    Output:["_col0"]
+                                                    Filter Operator [FIL_111] (rows=9600 width=471)
+                                                      predicate:(t_time BETWEEN 49530 AND 78330 and t_time_sk is not null)
+                                                      TableScan [TS_3] (rows=86400 width=471)
+                                                        default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_time"]
+                                              <-Map 19 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_48]
                                                   PartitionCols:_col1
                                                   Select Operator [SEL_35] (rows=287989836 width=135)
@@ -538,15 +547,6 @@ Stage-0
                                                       predicate:(cs_warehouse_sk is not null and cs_sold_date_sk is not null and cs_sold_time_sk is not null and cs_ship_mode_sk is not null)
                                                       TableScan [TS_33] (rows=287989836 width=135)
                                                         default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_sold_time_sk","cs_ship_mode_sk","cs_warehouse_sk","cs_quantity","cs_ext_sales_price","cs_net_paid_inc_ship_tax"]
-                                              <-Map 20 [SIMPLE_EDGE]
-                                                SHUFFLE [RS_49]
-                                                  PartitionCols:_col0
-                                                  Select Operator [SEL_38] (rows=9600 width=471)
-                                                    Output:["_col0"]
-                                                    Filter Operator [FIL_111] (rows=9600 width=471)
-                                                      predicate:(t_time BETWEEN 49530 AND 78330 and t_time_sk is not null)
-                                                      TableScan [TS_36] (rows=86400 width=471)
-                                                        default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_time"]
                   <-Reducer 6 [CONTAINS]
                     Reduce Output Operator [RS_70]
                       PartitionCols:_col0, _col1, _col2, _col3, _col4, _col5
@@ -565,48 +565,53 @@ Stage-0
                                   Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23","_col24","_col25","_col26","_col27","_col28","_col29"]
                                   Merge Join Operator [MERGEJOIN_118] (rows=210834322 width=135)
                                     Conds:RS_24._col3=RS_25._col0(Inner),Output:["_col4","_col5","_col6","_col11","_col15","_col16","_col17","_col18","_col19","_col20"]
-                                  <-Map 13 [SIMPLE_EDGE]
+                                  <-Map 18 [SIMPLE_EDGE]
                                     SHUFFLE [RS_25]
                                       PartitionCols:_col0
                                       Select Operator [SEL_14] (rows=27 width=1029)
                                         Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"]
                                         Filter Operator [FIL_109] (rows=27 width=1029)
                                           predicate:w_warehouse_sk is not null
-                                          TableScan [TS_12] (rows=27 width=1029)
-                                            default@warehouse,warehouse,Tbl:COMPLETE,Col:NONE,Output:["w_warehouse_sk","w_warehouse_name","w_warehouse_sq_ft","w_city","w_county","w_state","w_country"]
+                                           Please refer to the previous TableScan [TS_12]
                                   <-Reducer 4 [SIMPLE_EDGE]
                                     SHUFFLE [RS_24]
                                       PartitionCols:_col3
                                       Merge Join Operator [MERGEJOIN_117] (rows=191667562 width=135)
                                         Conds:RS_21._col2=RS_22._col0(Inner),Output:["_col3","_col4","_col5","_col6","_col11"]
-                                      <-Map 12 [SIMPLE_EDGE]
+                                      <-Map 17 [SIMPLE_EDGE]
                                         SHUFFLE [RS_22]
                                           PartitionCols:_col0
                                           Select Operator [SEL_11] (rows=1 width=0)
                                             Output:["_col0"]
                                             Filter Operator [FIL_108] (rows=1 width=0)
                                               predicate:((sm_carrier) IN ('DIAMOND', 'AIRBORNE') and sm_ship_mode_sk is not null)
-                                              TableScan [TS_9] (rows=1 width=0)
-                                                default@ship_mode,ship_mode,Tbl:PARTIAL,Col:NONE,Output:["sm_ship_mode_sk","sm_carrier"]
+                                               Please refer to the previous TableScan [TS_9]
                                       <-Reducer 3 [SIMPLE_EDGE]
                                         SHUFFLE [RS_21]
                                           PartitionCols:_col2
                                           Merge Join Operator [MERGEJOIN_116] (rows=174243235 width=135)
                                             Conds:RS_18._col0=RS_19._col0(Inner),Output:["_col2","_col3","_col4","_col5","_col6","_col11"]
-                                          <-Map 11 [SIMPLE_EDGE]
+                                          <-Map 16 [SIMPLE_EDGE]
                                             SHUFFLE [RS_19]
                                               PartitionCols:_col0
                                               Select Operator [SEL_8] (rows=36524 width=1119)
                                                 Output:["_col0","_col2"]
                                                 Filter Operator [FIL_107] (rows=36524 width=1119)
                                                   predicate:((d_year = 2002) and d_date_sk is not null)
-                                                  TableScan [TS_6] (rows=73049 width=1119)
-                                                    default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                                   Please refer to the previous TableScan [TS_6]
                                           <-Reducer 2 [SIMPLE_EDGE]
                                             SHUFFLE [RS_18]
                                               PartitionCols:_col0
                                               Merge Join Operator [MERGEJOIN_115] (rows=158402938 width=135)
                                                 Conds:RS_15._col1=RS_16._col0(Inner),Output:["_col0","_col2","_col3","_col4","_col5","_col6"]
+                                              <-Map 10 [SIMPLE_EDGE]
+                                                SHUFFLE [RS_16]
+                                                  PartitionCols:_col0
+                                                  Select Operator [SEL_5] (rows=9600 width=471)
+                                                    Output:["_col0"]
+                                                    Filter Operator [FIL_106] (rows=9600 width=471)
+                                                      predicate:(t_time BETWEEN 49530 AND 78330 and t_time_sk is not null)
+                                                       Please refer to the previous TableScan [TS_3]
                                               <-Map 1 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_15]
                                                   PartitionCols:_col1
@@ -616,13 +621,4 @@ Stage-0
                                                       predicate:(ws_warehouse_sk is not null and ws_sold_date_sk is not null and ws_sold_time_sk is not null and ws_ship_mode_sk is not null)
                                                       TableScan [TS_0] (rows=144002668 width=135)
                                                         default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_sold_time_sk","ws_ship_mode_sk","ws_warehouse_sk","ws_quantity","ws_sales_price","ws_net_paid_inc_tax"]
-                                              <-Map 10 [SIMPLE_EDGE]
-                                                SHUFFLE [RS_16]
-                                                  PartitionCols:_col0
-                                                  Select Operator [SEL_5] (rows=9600 width=471)
-                                                    Output:["_col0"]
-                                                    Filter Operator [FIL_106] (rows=9600 width=471)
-                                                      predicate:(t_time BETWEEN 49530 AND 78330 and t_time_sk is not null)
-                                                      TableScan [TS_3] (rows=86400 width=471)
-                                                        default@time_dim,time_dim,Tbl:COMPLETE,Col:NONE,Output:["t_time_sk","t_time"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query68.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query68.q.out b/ql/src/test/results/clientpositive/perf/query68.q.out
index a015852..84f701b 100644
--- a/ql/src/test/results/clientpositive/perf/query68.q.out
+++ b/ql/src/test/results/clientpositive/perf/query68.q.out
@@ -5,14 +5,14 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Map 15 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+Reducer 10 <- Map 13 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Map 14 (SIMPLE_EDGE), Reducer 10 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-Reducer 3 <- Reducer 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
-Reducer 7 <- Map 12 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE)
-Reducer 8 <- Map 13 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
-Reducer 9 <- Map 14 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
+Reducer 6 <- Map 5 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
+Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 9 <- Map 12 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -32,30 +32,52 @@ Stage-0
                   predicate:(_col5 <> _col8)
                   Merge Join Operator [MERGEJOIN_86] (rows=463823414 width=88)
                     Conds:RS_42._col0=RS_43._col1(Inner),Output:["_col2","_col3","_col5","_col6","_col8","_col9","_col10","_col11"]
-                  <-Reducer 11 [SIMPLE_EDGE]
+                  <-Reducer 2 [SIMPLE_EDGE]
+                    SHUFFLE [RS_42]
+                      PartitionCols:_col0
+                      Merge Join Operator [MERGEJOIN_81] (rows=88000001 width=860)
+                        Conds:RS_39._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col5"]
+                      <-Map 5 [SIMPLE_EDGE]
+                        SHUFFLE [RS_40]
+                          PartitionCols:_col0
+                          Select Operator [SEL_5] (rows=40000000 width=1014)
+                            Output:["_col0","_col1"]
+                            Filter Operator [FIL_75] (rows=40000000 width=1014)
+                              predicate:ca_address_sk is not null
+                              TableScan [TS_3] (rows=40000000 width=1014)
+                                default@customer_address,current_addr,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
+                      <-Map 1 [SIMPLE_EDGE]
+                        SHUFFLE [RS_39]
+                          PartitionCols:_col1
+                          Select Operator [SEL_2] (rows=80000000 width=860)
+                            Output:["_col0","_col1","_col2","_col3"]
+                            Filter Operator [FIL_74] (rows=80000000 width=860)
+                              predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
+                              TableScan [TS_0] (rows=80000000 width=860)
+                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_addr_sk","c_first_name","c_last_name"]
+                  <-Reducer 7 [SIMPLE_EDGE]
                     SHUFFLE [RS_43]
                       PartitionCols:_col1
                       Select Operator [SEL_37] (rows=421657640 width=88)
                         Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
                         Group By Operator [GBY_36] (rows=421657640 width=88)
                           Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)","sum(VALUE._col2)"],keys:KEY._col0, KEY._col1, KEY._col2, KEY._col3
-                        <-Reducer 10 [SIMPLE_EDGE]
+                        <-Reducer 6 [SIMPLE_EDGE]
                           SHUFFLE [RS_35]
                             PartitionCols:_col0, _col1, _col2, _col3
                             Group By Operator [GBY_34] (rows=843315281 width=88)
                               Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"],aggregations:["sum(_col6)","sum(_col7)","sum(_col8)"],keys:_col1, _col18, _col3, _col5
                               Merge Join Operator [MERGEJOIN_85] (rows=843315281 width=88)
                                 Conds:RS_30._col3=RS_31._col0(Inner),Output:["_col1","_col3","_col5","_col6","_col7","_col8","_col18"]
-                              <-Map 15 [SIMPLE_EDGE]
+                              <-Map 5 [SIMPLE_EDGE]
                                 SHUFFLE [RS_31]
                                   PartitionCols:_col0
                                   Select Operator [SEL_20] (rows=40000000 width=1014)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_80] (rows=40000000 width=1014)
                                       predicate:ca_address_sk is not null
-                                      TableScan [TS_18] (rows=40000000 width=1014)
-                                        default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
-                              <-Reducer 9 [SIMPLE_EDGE]
+                                       Please refer to the previous TableScan [TS_3]
+                              <-Reducer 11 [SIMPLE_EDGE]
                                 SHUFFLE [RS_30]
                                   PartitionCols:_col3
                                   Merge Join Operator [MERGEJOIN_84] (rows=766650239 width=88)
@@ -69,7 +91,7 @@ Stage-0
                                           predicate:(((hd_dep_count = 4) or (hd_vehicle_count = 2)) and hd_demo_sk is not null)
                                           TableScan [TS_15] (rows=7200 width=107)
                                             default@household_demographics,household_demographics,Tbl:COMPLETE,Col:NONE,Output:["hd_demo_sk","hd_dep_count","hd_vehicle_count"]
-                                  <-Reducer 8 [SIMPLE_EDGE]
+                                  <-Reducer 10 [SIMPLE_EDGE]
                                     SHUFFLE [RS_27]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_83] (rows=696954748 width=88)
@@ -83,7 +105,7 @@ Stage-0
                                               predicate:((s_city) IN ('Rosedale', 'Bethlehem') and s_store_sk is not null)
                                               TableScan [TS_12] (rows=1704 width=1910)
                                                 default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_city"]
-                                      <-Reducer 7 [SIMPLE_EDGE]
+                                      <-Reducer 9 [SIMPLE_EDGE]
                                         SHUFFLE [RS_24]
                                           PartitionCols:_col4
                                           Merge Join Operator [MERGEJOIN_82] (rows=633595212 width=88)
@@ -97,7 +119,7 @@ Stage-0
                                                   predicate:((d_year) IN (1998, 1999, 2000) and d_dom BETWEEN 1 AND 2 and d_date_sk is not null)
                                                   TableScan [TS_9] (rows=73049 width=1119)
                                                     default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_dom"]
-                                          <-Map 6 [SIMPLE_EDGE]
+                                          <-Map 8 [SIMPLE_EDGE]
                                             SHUFFLE [RS_21]
                                               PartitionCols:_col0
                                               Select Operator [SEL_8] (rows=575995635 width=88)
@@ -106,27 +128,4 @@ Stage-0
                                                   predicate:(ss_sold_date_sk is not null and ss_store_sk is not null and ss_hdemo_sk is not null and ss_addr_sk is not null and ss_customer_sk is not null)
                                                   TableScan [TS_6] (rows=575995635 width=88)
                                                     default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk","ss_hdemo_sk","ss_addr_sk","ss_store_sk","ss_ticket_number","ss_ext_sales_price","ss_ext_list_price","ss_ext_tax"]
-                  <-Reducer 2 [SIMPLE_EDGE]
-                    SHUFFLE [RS_42]
-                      PartitionCols:_col0
-                      Merge Join Operator [MERGEJOIN_81] (rows=88000001 width=860)
-                        Conds:RS_39._col1=RS_40._col0(Inner),Output:["_col0","_col2","_col3","_col5"]
-                      <-Map 1 [SIMPLE_EDGE]
-                        SHUFFLE [RS_39]
-                          PartitionCols:_col1
-                          Select Operator [SEL_2] (rows=80000000 width=860)
-                            Output:["_col0","_col1","_col2","_col3"]
-                            Filter Operator [FIL_74] (rows=80000000 width=860)
-                              predicate:(c_customer_sk is not null and c_current_addr_sk is not null)
-                              TableScan [TS_0] (rows=80000000 width=860)
-                                default@customer,customer,Tbl:COMPLETE,Col:NONE,Output:["c_customer_sk","c_current_addr_sk","c_first_name","c_last_name"]
-                      <-Map 5 [SIMPLE_EDGE]
-                        SHUFFLE [RS_40]
-                          PartitionCols:_col0
-                          Select Operator [SEL_5] (rows=40000000 width=1014)
-                            Output:["_col0","_col1"]
-                            Filter Operator [FIL_75] (rows=40000000 width=1014)
-                              predicate:ca_address_sk is not null
-                              TableScan [TS_3] (rows=40000000 width=1014)
-                                default@customer_address,current_addr,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_city"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query69.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query69.q.out b/ql/src/test/results/clientpositive/perf/query69.q.out
index 87087ac..7ee80a6 100644
--- a/ql/src/test/results/clientpositive/perf/query69.q.out
+++ b/ql/src/test/results/clientpositive/perf/query69.q.out
@@ -93,14 +93,14 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
 Reducer 12 <- Reducer 11 (SIMPLE_EDGE)
-Reducer 15 <- Map 14 (SIMPLE_EDGE), Map 17 (SIMPLE_EDGE)
-Reducer 16 <- Reducer 15 (SIMPLE_EDGE)
-Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 21 (SIMPLE_EDGE)
+Reducer 14 <- Map 13 (SIMPLE_EDGE), Map 18 (SIMPLE_EDGE)
+Reducer 15 <- Reducer 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 13 (SIMPLE_EDGE), Map 19 (SIMPLE_EDGE)
+Reducer 17 <- Reducer 16 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
-Reducer 20 <- Reducer 19 (SIMPLE_EDGE)
 Reducer 3 <- Map 9 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 12 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
-Reducer 5 <- Reducer 20 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 12 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 5 <- Reducer 17 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
 
@@ -131,21 +131,30 @@ Stage-0
                           predicate:_col15 is null
                           Merge Join Operator [MERGEJOIN_114] (rows=383325119 width=88)
                             Conds:RS_64._col0=RS_65._col0(Left Outer),Output:["_col6","_col7","_col8","_col9","_col10","_col15"]
-                          <-Reducer 20 [SIMPLE_EDGE]
+                          <-Reducer 17 [SIMPLE_EDGE]
                             SHUFFLE [RS_65]
                               PartitionCols:_col0
                               Select Operator [SEL_63] (rows=158394413 width=135)
                                 Output:["_col0","_col1"]
                                 Group By Operator [GBY_62] (rows=158394413 width=135)
                                   Output:["_col0"],keys:KEY._col0
-                                <-Reducer 19 [SIMPLE_EDGE]
+                                <-Reducer 16 [SIMPLE_EDGE]
                                   SHUFFLE [RS_61]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_60] (rows=316788826 width=135)
                                       Output:["_col0"],keys:_col1
                                       Merge Join Operator [MERGEJOIN_112] (rows=316788826 width=135)
                                         Conds:RS_56._col0=RS_57._col0(Inner),Output:["_col1"]
-                                      <-Map 18 [SIMPLE_EDGE]
+                                      <-Map 13 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_57]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_55] (rows=4058 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_107] (rows=4058 width=1119)
+                                              predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
+                                              TableScan [TS_12] (rows=73049 width=1119)
+                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                      <-Map 19 [SIMPLE_EDGE]
                                         SHUFFLE [RS_56]
                                           PartitionCols:_col0
                                           Select Operator [SEL_52] (rows=287989836 width=135)
@@ -154,15 +163,6 @@ Stage-0
                                               predicate:(cs_ship_customer_sk is not null and cs_sold_date_sk is not null)
                                               TableScan [TS_50] (rows=287989836 width=135)
                                                 default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_ship_customer_sk"]
-                                      <-Map 21 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_57]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_55] (rows=4058 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_107] (rows=4058 width=1119)
-                                              predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
-                                              TableScan [TS_53] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                           <-Reducer 4 [SIMPLE_EDGE]
                             SHUFFLE [RS_64]
                               PartitionCols:_col0
@@ -188,6 +188,14 @@ Stage-0
                                                 Output:["_col0"],keys:_col1
                                                 Merge Join Operator [MERGEJOIN_110] (rows=158402938 width=135)
                                                   Conds:RS_15._col0=RS_16._col0(Inner),Output:["_col1"]
+                                                <-Map 13 [SIMPLE_EDGE]
+                                                  SHUFFLE [RS_16]
+                                                    PartitionCols:_col0
+                                                    Select Operator [SEL_14] (rows=4058 width=1119)
+                                                      Output:["_col0"]
+                                                      Filter Operator [FIL_103] (rows=4058 width=1119)
+                                                        predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
+                                                         Please refer to the previous TableScan [TS_12]
                                                 <-Map 10 [SIMPLE_EDGE]
                                                   SHUFFLE [RS_15]
                                                     PartitionCols:_col0
@@ -197,28 +205,27 @@ Stage-0
                                                         predicate:(ws_bill_customer_sk is not null and ws_sold_date_sk is not null)
                                                         TableScan [TS_9] (rows=144002668 width=135)
                                                           default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_bill_customer_sk"]
-                                                <-Map 13 [SIMPLE_EDGE]
-                                                  SHUFFLE [RS_16]
-                                                    PartitionCols:_col0
-                                                    Select Operator [SEL_14] (rows=4058 width=1119)
-                                                      Output:["_col0"]
-                                                      Filter Operator [FIL_103] (rows=4058 width=1119)
-                                                        predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
-                                                        TableScan [TS_12] (rows=73049 width=1119)
-                                                          default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                                    <-Reducer 16 [SIMPLE_EDGE]
+                                    <-Reducer 15 [SIMPLE_EDGE]
                                       SHUFFLE [RS_45]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_35] (rows=316797606 width=88)
                                           Output:["_col0"],keys:KEY._col0
-                                        <-Reducer 15 [SIMPLE_EDGE]
+                                        <-Reducer 14 [SIMPLE_EDGE]
                                           SHUFFLE [RS_34]
                                             PartitionCols:_col0
                                             Group By Operator [GBY_33] (rows=633595212 width=88)
                                               Output:["_col0"],keys:_col1
                                               Merge Join Operator [MERGEJOIN_111] (rows=633595212 width=88)
                                                 Conds:RS_29._col0=RS_30._col0(Inner),Output:["_col1"]
-                                              <-Map 14 [SIMPLE_EDGE]
+                                              <-Map 13 [SIMPLE_EDGE]
+                                                SHUFFLE [RS_30]
+                                                  PartitionCols:_col0
+                                                  Select Operator [SEL_28] (rows=4058 width=1119)
+                                                    Output:["_col0"]
+                                                    Filter Operator [FIL_105] (rows=4058 width=1119)
+                                                      predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
+                                                       Please refer to the previous TableScan [TS_12]
+                                              <-Map 18 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_29]
                                                   PartitionCols:_col0
                                                   Select Operator [SEL_25] (rows=575995635 width=88)
@@ -227,15 +234,6 @@ Stage-0
                                                       predicate:(ss_customer_sk is not null and ss_sold_date_sk is not null)
                                                       TableScan [TS_23] (rows=575995635 width=88)
                                                         default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_customer_sk"]
-                                              <-Map 17 [SIMPLE_EDGE]
-                                                SHUFFLE [RS_30]
-                                                  PartitionCols:_col0
-                                                  Select Operator [SEL_28] (rows=4058 width=1119)
-                                                    Output:["_col0"]
-                                                    Filter Operator [FIL_105] (rows=4058 width=1119)
-                                                      predicate:((d_year = 1999) and d_moy BETWEEN 1 AND 3 and d_date_sk is not null)
-                                                      TableScan [TS_26] (rows=73049 width=1119)
-                                                        default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                                     <-Reducer 3 [SIMPLE_EDGE]
                                       SHUFFLE [RS_43]
                                         PartitionCols:_col0

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query70.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query70.q.out b/ql/src/test/results/clientpositive/perf/query70.q.out
index 8e42fac..55c1461 100644
--- a/ql/src/test/results/clientpositive/perf/query70.q.out
+++ b/ql/src/test/results/clientpositive/perf/query70.q.out
@@ -75,16 +75,16 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 15 (SIMPLE_EDGE)
-Reducer 12 <- Map 16 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 13 <- Reducer 12 (SIMPLE_EDGE)
-Reducer 14 <- Reducer 13 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
-Reducer 3 <- Map 9 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 14 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 3 <- Map 13 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 11 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Map 12 (SIMPLE_EDGE)
+Reducer 9 <- Map 14 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -120,7 +120,7 @@ Stage-0
                                 Output:["_col0","_col1","_col2"]
                                 Merge Join Operator [MERGEJOIN_88] (rows=766650239 width=88)
                                   Conds:RS_43._col7=RS_44._col0(Inner),Output:["_col2","_col6","_col7"]
-                                <-Reducer 14 [SIMPLE_EDGE]
+                                <-Reducer 11 [SIMPLE_EDGE]
                                   SHUFFLE [RS_44]
                                     PartitionCols:_col0
                                     Select Operator [SEL_32] (rows=116159124 width=88)
@@ -131,19 +131,19 @@ Stage-0
                                           Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 DESC NULLS LAST","partition by:":"_col0"}]
                                           Select Operator [SEL_30] (rows=348477374 width=88)
                                             Output:["_col0","_col1"]
-                                          <-Reducer 13 [SIMPLE_EDGE]
+                                          <-Reducer 10 [SIMPLE_EDGE]
                                             SHUFFLE [RS_29]
                                               PartitionCols:_col0
                                               Group By Operator [GBY_27] (rows=348477374 width=88)
                                                 Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                                              <-Reducer 12 [SIMPLE_EDGE]
+                                              <-Reducer 9 [SIMPLE_EDGE]
                                                 SHUFFLE [RS_26]
                                                   PartitionCols:_col0
                                                   Group By Operator [GBY_25] (rows=696954748 width=88)
                                                     Output:["_col0","_col1"],aggregations:["sum(_col2)"],keys:_col6
                                                     Merge Join Operator [MERGEJOIN_87] (rows=696954748 width=88)
                                                       Conds:RS_21._col1=RS_22._col0(Inner),Output:["_col2","_col6"]
-                                                    <-Map 16 [SIMPLE_EDGE]
+                                                    <-Map 14 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_22]
                                                         PartitionCols:_col0
                                                         Select Operator [SEL_17] (rows=1704 width=1910)
@@ -152,35 +152,35 @@ Stage-0
                                                             predicate:(s_store_sk is not null and s_state is not null)
                                                             TableScan [TS_15] (rows=1704 width=1910)
                                                               default@store,store,Tbl:COMPLETE,Col:NONE,Output:["s_store_sk","s_state"]
-                                                    <-Reducer 11 [SIMPLE_EDGE]
+                                                    <-Reducer 8 [SIMPLE_EDGE]
                                                       SHUFFLE [RS_21]
                                                         PartitionCols:_col1
                                                         Merge Join Operator [MERGEJOIN_86] (rows=633595212 width=88)
                                                           Conds:RS_18._col0=RS_19._col0(Inner),Output:["_col1","_col2"]
-                                                        <-Map 10 [SIMPLE_EDGE]
+                                                        <-Map 1 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_18]
                                                             PartitionCols:_col0
                                                             Select Operator [SEL_11] (rows=575995635 width=88)
                                                               Output:["_col0","_col1","_col2"]
                                                               Filter Operator [FIL_81] (rows=575995635 width=88)
                                                                 predicate:(ss_store_sk is not null and ss_sold_date_sk is not null)
-                                                                TableScan [TS_9] (rows=575995635 width=88)
+                                                                TableScan [TS_0] (rows=575995635 width=88)
                                                                   default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_store_sk","ss_net_profit"]
-                                                        <-Map 15 [SIMPLE_EDGE]
+                                                        <-Map 12 [SIMPLE_EDGE]
                                                           SHUFFLE [RS_19]
                                                             PartitionCols:_col0
                                                             Select Operator [SEL_14] (rows=8116 width=1119)
                                                               Output:["_col0"]
                                                               Filter Operator [FIL_82] (rows=8116 width=1119)
                                                                 predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                                TableScan [TS_12] (rows=73049 width=1119)
-                                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
+                                                                TableScan [TS_3] (rows=73049 width=1119)
+                                                                  default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
                                 <-Reducer 3 [SIMPLE_EDGE]
                                   SHUFFLE [RS_43]
                                     PartitionCols:_col7
                                     Merge Join Operator [MERGEJOIN_85] (rows=696954748 width=88)
                                       Conds:RS_40._col1=RS_41._col0(Inner),Output:["_col2","_col6","_col7"]
-                                    <-Map 9 [SIMPLE_EDGE]
+                                    <-Map 13 [SIMPLE_EDGE]
                                       SHUFFLE [RS_41]
                                         PartitionCols:_col0
                                         Select Operator [SEL_8] (rows=1704 width=1910)
@@ -201,15 +201,13 @@ Stage-0
                                               Output:["_col0","_col1","_col2"]
                                               Filter Operator [FIL_77] (rows=575995635 width=88)
                                                 predicate:(ss_sold_date_sk is not null and ss_store_sk is not null)
-                                                TableScan [TS_0] (rows=575995635 width=88)
-                                                  default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_store_sk","ss_net_profit"]
-                                        <-Map 8 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_0]
+                                        <-Map 12 [SIMPLE_EDGE]
                                           SHUFFLE [RS_38]
                                             PartitionCols:_col0
                                             Select Operator [SEL_5] (rows=8116 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_78] (rows=8116 width=1119)
                                                 predicate:(d_month_seq BETWEEN 1212 AND 1223 and d_date_sk is not null)
-                                                TableScan [TS_3] (rows=73049 width=1119)
-                                                  default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_month_seq"]
+                                                 Please refer to the previous TableScan [TS_3]
 


[22/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
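
For context on the regenerated plan files in this patch: with shared scans enabled, Tez reuses a single TableScan when the same table is read by more than one branch of a plan, which is why the golden files above drop the duplicated scans in favor of "Please refer to the previous TableScan [TS_x]" and renumber the remaining Map/Reducer vertices. Below is a minimal HiveQL sketch of the query shape that exercises this (mirroring the UNION branches in query66 above, which share the date_dim/time_dim/warehouse scans); the table and column names fact_a, fact_b, dim, amount, d_sk, and d_year are hypothetical and not taken from the TPC-DS tests in this commit:

    -- Two UNION ALL branches read the same dimension table. Under shared
    -- scans, the Tez explain plan keeps one TableScan for dim and the
    -- second branch refers back to it instead of scanning dim again.
    EXPLAIN
    SELECT d.d_year, SUM(f.amount)
    FROM fact_a f JOIN dim d ON f.d_sk = d.d_sk
    GROUP BY d.d_year
    UNION ALL
    SELECT d.d_year, SUM(g.amount)
    FROM fact_b g JOIN dim d ON g.d_sk = d.d_sk
    GROUP BY d.d_year;
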
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_null_agg.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_null_agg.q.out b/ql/src/test/results/clientpositive/llap/subquery_null_agg.q.out
index 7d9d77c..3435bd9 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_null_agg.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_null_agg.q.out
@@ -55,10 +55,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -71,13 +71,6 @@ STAGE PLANS:
                     Reduce Output Operator
                       sort order: 
                       Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: table_7
-                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
                     Filter Operator
@@ -92,13 +85,6 @@ STAGE PLANS:
                           sort order: 
                           Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                           value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: table_7
-                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
                   Select Operator
                     Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
                     Filter Operator
@@ -156,7 +142,7 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -175,7 +161,7 @@ STAGE PLANS:
                       sort order: 
                       Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: bigint)
-        Reducer 7 
+        Reducer 5 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/subquery_scalar.q.out b/ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
index c8524df..1563e1f 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_scalar.q.out
@@ -335,7 +335,7 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 5 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
         Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -376,13 +376,6 @@ STAGE PLANS:
                           sort order: 
                           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: part_null
-                  Statistics: Num rows: 32 Data size: 3256 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is null (type: boolean)
                     Statistics: Num rows: 16 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
@@ -799,9 +792,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
         Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (CUSTOM_SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -837,13 +830,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_size (type: int)
                     outputColumnNames: p_size
@@ -911,7 +897,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -973,12 +959,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
         Reducer 4 <- Map 3 (SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
         Reducer 6 <- Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
-        Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
+        Reducer 7 <- Map 3 (SIMPLE_EDGE)
+        Reducer 8 <- Reducer 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -1006,13 +992,6 @@ STAGE PLANS:
                     sort order: ++
                     Map-reduce partition columns: p_mfgr (type: string)
                     Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2652 Basic stats: COMPLETE Column stats: COMPLETE
                   Reduce Output Operator
                     key expressions: p_mfgr (type: string), p_size (type: int)
                     sort order: ++
@@ -1115,7 +1094,7 @@ STAGE PLANS:
                     Reduce Output Operator
                       sort order: 
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -1151,7 +1130,7 @@ STAGE PLANS:
                       sort order: +
                       Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE
                       TopN Hash Memory Usage: 0.1
-        Reducer 9 
+        Reducer 8 
             Execution mode: llap
             Reduce Operator Tree:
               Select Operator
@@ -2324,10 +2303,10 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 11 <- Map 10 (SIMPLE_EDGE)
+        Reducer 10 <- Map 7 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 11 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 10 (CUSTOM_SIMPLE_EDGE), Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 9 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
         Reducer 8 <- Map 7 (SIMPLE_EDGE)
         Reducer 9 <- Reducer 8 (CUSTOM_SIMPLE_EDGE)
@@ -2348,30 +2327,6 @@ STAGE PLANS:
                       value expressions: _col0 (type: string), _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: (key = '90') (type: boolean)
-                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
-                      Group By Operator
-                        aggregations: count()
-                        keys: '90' (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
-                          value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
         Map 5 
             Map Operator Tree:
                 TableScan
@@ -2412,9 +2367,26 @@ STAGE PLANS:
                           sort order: +
                           Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 1 Data size: 86 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key = '90') (type: boolean)
+                    Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: '90' (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 11 
+        Reducer 10 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2723,18 +2695,18 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (CUSTOM_SIMPLE_EDGE)
-        Reducer 11 <- Reducer 10 (SIMPLE_EDGE), Union 6 (CONTAINS)
-        Reducer 13 <- Map 12 (CUSTOM_SIMPLE_EDGE)
-        Reducer 14 <- Reducer 13 (SIMPLE_EDGE), Union 15 (CONTAINS)
-        Reducer 16 <- Union 15 (SIMPLE_EDGE)
-        Reducer 18 <- Map 17 (CUSTOM_SIMPLE_EDGE)
-        Reducer 19 <- Reducer 18 (SIMPLE_EDGE), Union 15 (CONTAINS)
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 16 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 10 <- Reducer 9 (SIMPLE_EDGE), Union 11 (CONTAINS)
+        Reducer 12 <- Union 11 (SIMPLE_EDGE)
+        Reducer 14 <- Map 13 (CUSTOM_SIMPLE_EDGE)
+        Reducer 15 <- Reducer 14 (SIMPLE_EDGE), Union 6 (CONTAINS)
+        Reducer 16 <- Map 13 (CUSTOM_SIMPLE_EDGE)
+        Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Union 11 (CONTAINS)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 12 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
         Reducer 4 <- Map 3 (CUSTOM_SIMPLE_EDGE)
         Reducer 5 <- Reducer 4 (SIMPLE_EDGE), Union 6 (CONTAINS)
         Reducer 7 <- Union 6 (SIMPLE_EDGE)
         Reducer 8 <- Reducer 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 3 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -2752,17 +2724,17 @@ STAGE PLANS:
                       value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Map 12 
+        Map 13 
             Map Operator Tree:
                 TableScan
                   alias: part
-                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 26 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: p_name (type: string)
-                    outputColumnNames: p_name
-                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
+                    expressions: p_brand (type: string)
+                    outputColumnNames: p_brand
+                    Statistics: Num rows: 26 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: count(p_name)
+                      aggregations: count(p_brand)
                       mode: hash
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2770,13 +2742,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 17 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: p_brand (type: string)
                     outputColumnNames: p_brand
@@ -2810,19 +2775,12 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: p_brand (type: string)
-                    outputColumnNames: p_brand
-                    Statistics: Num rows: 26 Data size: 2392 Basic stats: COMPLETE Column stats: COMPLETE
+                    expressions: p_name (type: string)
+                    outputColumnNames: p_name
+                    Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
-                      aggregations: count(p_brand)
+                      aggregations: count(p_name)
                       mode: hash
                       outputColumnNames: _col0
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2837,11 +2795,12 @@ STAGE PLANS:
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: bigint)
                 mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                 Group By Operator
-                  aggregations: count(1)
+                  aggregations: count(_col1)
                   keys: _col0 (type: bigint)
                   mode: hash
                   outputColumnNames: _col0, _col1
@@ -2852,7 +2811,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: bigint)
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 11 
+        Reducer 12 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2861,19 +2820,18 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: count(_col1)
-                  keys: _col0 (type: bigint)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
+                Filter Operator
+                  predicate: (_col1 = 2) (type: boolean)
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: bigint)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: bigint)
-                    Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: bigint)
-        Reducer 13 
+                  Select Operator
+                    expressions: _col0 (type: bigint)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: bigint)
+        Reducer 14 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2893,7 +2851,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: bigint)
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 14 
+        Reducer 15 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -2919,26 +2877,6 @@ STAGE PLANS:
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: bigint)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: (_col1 = 2) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: _col0 (type: bigint)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: bigint)
-        Reducer 18 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
                 mode: mergepartial
                 outputColumnNames: _col0
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -2954,7 +2892,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: bigint)
                     Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: bigint)
-        Reducer 19 
+        Reducer 17 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3086,8 +3024,28 @@ STAGE PLANS:
                     Reduce Output Operator
                       sort order: 
                       Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-        Union 15 
-            Vertex: Union 15
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: count(1)
+                  keys: _col0 (type: bigint)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: bigint)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: bigint)
+                    Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: bigint)
+        Union 11 
+            Vertex: Union 11
         Union 6 
             Vertex: Union 6
 
@@ -3307,12 +3265,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
         Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 9 <- Map 7 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3368,13 +3326,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 9 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     keys: p_name (type: string)
                     mode: hash
@@ -3387,24 +3338,6 @@ STAGE PLANS:
                       Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: string), true (type: boolean)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -3493,6 +3426,24 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col0 (type: string), true (type: boolean)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -3551,12 +3502,12 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 10 <- Map 9 (SIMPLE_EDGE)
         Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Reducer 6 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
         Reducer 6 <- Map 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (SIMPLE_EDGE)
+        Reducer 9 <- Map 8 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -3592,13 +3543,6 @@ STAGE PLANS:
                         sort order: 
                         Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: bigint), _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: part
-                  Statistics: Num rows: 26 Data size: 3146 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
                     keys: p_name (type: string)
                     mode: hash
@@ -3611,7 +3555,7 @@ STAGE PLANS:
                       Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
-        Map 9 
+        Map 8 
             Map Operator Tree:
                 TableScan
                   alias: pp
@@ -3633,25 +3577,6 @@ STAGE PLANS:
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
-        Reducer 10 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: min(VALUE._col0)
-                keys: KEY._col0 (type: string)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col1 (type: string), true (type: boolean), _col0 (type: string)
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col2 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col2 (type: string)
-                    Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: string), _col1 (type: boolean)
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -3730,7 +3655,7 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint)
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -3748,6 +3673,25 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 1625 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col1 (type: boolean)
+        Reducer 9 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col1 (type: string), true (type: boolean), _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col2 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col2 (type: string)
+                    Statistics: Num rows: 8 Data size: 1628 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: string), _col1 (type: boolean)
 
   Stage: Stage-0
     Fetch Operator
@@ -4907,9 +4851,9 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 7 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 5 <- Map 4 (SIMPLE_EDGE)
-        Reducer 7 <- Map 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 4 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -4949,13 +4893,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: depts
-                  Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: deptno is not null (type: boolean)
                     Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
@@ -5041,7 +4978,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col2 (type: string)
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint), _col1 (type: boolean)
-        Reducer 7 
+        Reducer 6 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5301,8 +5238,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 5 (CUSTOM_SIMPLE_EDGE)
-        Reducer 5 <- Map 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE), Reducer 4 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5326,13 +5263,6 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key > '9') (type: boolean)
                     Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
@@ -5387,7 +5317,7 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
+        Reducer 4 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator
@@ -5457,9 +5387,9 @@ STAGE PLANS:
       Edges:
         Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-        Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 8 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE), Reducer 7 (CUSTOM_SIMPLE_EDGE)
         Reducer 6 <- Map 5 (SIMPLE_EDGE)
-        Reducer 8 <- Map 7 (CUSTOM_SIMPLE_EDGE)
+        Reducer 7 <- Map 5 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -5500,13 +5430,6 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 69 Data size: 6003 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: llap
-            LLAP IO: no inputs
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (key > '9') (type: boolean)
                     Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
@@ -5597,7 +5520,7 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 69 Data size: 6003 Basic stats: COMPLETE Column stats: COMPLETE
-        Reducer 8 
+        Reducer 7 
             Execution mode: llap
             Reduce Operator Tree:
               Group By Operator


[46/50] [abbrv] hive git commit: HIVE-15483: Database and table names are case sensitive when used in show grant (Niklaus Xiao via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-15483: Database and table names are case sensitive when used in show grant (Niklaus Xiao via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/360a91e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/360a91e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/360a91e6

Branch: refs/heads/hive-14535
Commit: 360a91e64590940641aff9304eb0275cbfd82e39
Parents: 91948ec
Author: Niklaus Xiao <st...@live.cn>
Authored: Tue May 16 08:39:16 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:39:16 2017 +0200

----------------------------------------------------------------------
 .../src/java/org/apache/hadoop/hive/metastore/ObjectStore.java   | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/360a91e6/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index ee48617..b28983f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -5953,6 +5953,8 @@ public class ObjectStore implements RawStore, Configurable {
   public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
     boolean success = false;
     Query query = null;
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
     try {
       openTransaction();
       LOG.debug("Executing listTableGrantsAll");
@@ -6153,6 +6155,8 @@ public class ObjectStore implements RawStore, Configurable {
       String columnName) {
     boolean success = false;
     Query query = null;
+    dbName = HiveStringUtils.normalizeIdentifier(dbName);
+    tableName = HiveStringUtils.normalizeIdentifier(tableName);
     try {
       openTransaction();
       LOG.debug("Executing listPrincipalTableColumnGrantsAll");


[34/50] [abbrv] hive git commit: HIVE-1010: Implement INFORMATION_SCHEMA in Hive (Gunther Hagleitner, reviewed by Thejas Nair)

Posted by we...@apache.org.
HIVE-1010: Implement INFORMATION_SCHEMA in Hive (Gunther Hagleitner, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/77f44b66
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/77f44b66
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/77f44b66

Branch: refs/heads/hive-14535
Commit: 77f44b66d188aed676014c226926d647ebec28d3
Parents: 7260420
Author: Gunther Hagleitner <gu...@apache.org>
Authored: Mon May 15 08:19:43 2017 -0700
Committer: Gunther Hagleitner <gu...@apache.org>
Committed: Mon May 15 08:19:43 2017 -0700

----------------------------------------------------------------------
 .../apache/hive/beeline/HiveSchemaHelper.java   |  108 +-
 .../org/apache/hive/beeline/HiveSchemaTool.java |  102 +-
 .../apache/hive/beeline/TestHiveSchemaTool.java |    4 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |    2 +
 .../hadoop/hive/druid/DruidStorageHandler.java  |    6 +
 .../org/apache/hive/beeline/TestSchemaTool.java |    4 +-
 .../test/resources/testconfiguration.properties |    1 +
 .../hive/storage/jdbc/JdbcInputFormat.java      |   15 +-
 .../hive/storage/jdbc/JdbcRecordReader.java     |    4 +-
 .../org/apache/hive/storage/jdbc/JdbcSerDe.java |    4 +-
 .../hive/storage/jdbc/JdbcStorageHandler.java   |   39 +-
 .../hive/storage/jdbc/conf/DatabaseType.java    |    6 +-
 .../jdbc/conf/JdbcStorageConfigManager.java     |   98 +-
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java   |   18 +-
 .../storage/jdbc/dao/JdbcRecordIterator.java    |    3 -
 .../storage/jdbc/dao/MySqlDatabaseAccessor.java |    2 +-
 .../config/JdbcStorageConfigManagerTest.java    |   12 +-
 .../hadoop/hive/llap/cli/LlapServiceDriver.java |    4 +-
 .../upgrade/hive/hive-schema-3.0.0.hive.sql     | 1223 +++++++
 .../hadoop/hive/ql/exec/FileSinkOperator.java   |   22 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |   46 +-
 .../hadoop/hive/ql/exec/tez/DagUtils.java       |    3 +-
 .../hive/ql/index/HiveIndexedInputFormat.java   |    7 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |   16 +-
 .../hive/ql/io/parquet/ProjectionPusher.java    |    9 +-
 .../hive/ql/metadata/DefaultStorageHandler.java |    5 +
 .../hive/ql/metadata/HiveStorageHandler.java    |    6 +
 .../hadoop/hive/ql/parse/UnparseTranslator.java |    1 -
 .../hadoop/hive/ql/plan/PartitionDesc.java      |    5 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   |   15 +
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |    9 +
 .../hadoop/hive/ql/ppd/OpProcFactory.java       |   20 +-
 .../hive/ql/exec/InputEstimatorTestClass.java   |    5 +
 .../test/queries/clientpositive/jdbc_handler.q  |   23 +-
 ql/src/test/queries/clientpositive/sysdb.q      |  117 +
 .../clientpositive/llap/jdbc_handler.q.out      |   42 +-
 .../results/clientpositive/llap/sysdb.q.out     | 3447 ++++++++++++++++++
 37 files changed, 5308 insertions(+), 145 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java
index 711f6a8..a4ecc08 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaHelper.java
@@ -34,6 +34,7 @@ import java.util.List;
 
 public class HiveSchemaHelper {
   public static final String DB_DERBY = "derby";
+  public static final String DB_HIVE = "hive";
   public static final String DB_MSSQL = "mssql";
   public static final String DB_MYSQL = "mysql";
   public static final String DB_POSTGRACE = "postgres";
@@ -50,15 +51,16 @@ public class HiveSchemaHelper {
    * @throws org.apache.hadoop.hive.metastore.api.MetaException
    */
   public static Connection getConnectionToMetastore(String userName,
-      String password, boolean printInfo, HiveConf hiveConf)
+      String password, String url, String driver, boolean printInfo,
+      HiveConf hiveConf)
       throws HiveMetaException {
     try {
-      String connectionURL = getValidConfVar(
-          HiveConf.ConfVars.METASTORECONNECTURLKEY, hiveConf);
-      String driver = getValidConfVar(
-          HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf);
+      url = url == null ? getValidConfVar(
+        HiveConf.ConfVars.METASTORECONNECTURLKEY, hiveConf) : url;
+      driver = driver == null ? getValidConfVar(
+        HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf) : driver;
       if (printInfo) {
-        System.out.println("Metastore connection URL:\t " + connectionURL);
+        System.out.println("Metastore connection URL:\t " + url);
         System.out.println("Metastore Connection Driver :\t " + driver);
         System.out.println("Metastore connection User:\t " + userName);
       }
@@ -70,7 +72,7 @@ public class HiveSchemaHelper {
       Class.forName(driver);
 
       // Connect using the JDBC URL and user/pass from conf
-      return DriverManager.getConnection(connectionURL, userName, password);
+      return DriverManager.getConnection(url, userName, password);
     } catch (IOException e) {
       throw new HiveMetaException("Failed to get schema version.", e);
     } catch (SQLException e) {
@@ -97,7 +99,8 @@ public class HiveSchemaHelper {
       COMMENT
     }
 
-    static final String DEFAUTL_DELIMITER = ";";
+    static final String DEFAULT_DELIMITER = ";";
+    static final String DEFAULT_QUOTE = "\"";
 
     /**
      * Find the type of given command
@@ -140,6 +143,13 @@ public class HiveSchemaHelper {
     public String getDelimiter();
 
     /**
+     * Get the SQL identifier quotation character
+     *
+     * @return
+     */
+    public String getQuoteCharacter();
+
+    /**
      * Clear any client specific tags
      *
      * @return
@@ -162,6 +172,17 @@ public class HiveSchemaHelper {
      */
     public String buildCommand(String scriptDir, String scriptFile)
         throws IllegalFormatException, IOException;
+
+    /**
+     * Flatten the nested upgrade script into a buffer
+     *
+     * @param scriptDir  upgrade script directory
+     * @param scriptFile upgrade script file
+     * @param fixQuotes whether to replace quote characters
+     * @return string of sql commands
+     */
+    public String buildCommand(String scriptDir, String scriptFile, boolean fixQuotes)
+        throws IllegalFormatException, IOException;
   }
 
   /***
@@ -203,10 +224,16 @@ public class HiveSchemaHelper {
 
     @Override
     public String getDelimiter() {
-      return DEFAUTL_DELIMITER;
+      return DEFAULT_DELIMITER;
     }
 
     @Override
+    public String getQuoteCharacter() {
+      return DEFAULT_QUOTE;
+    }
+
+
+    @Override
     public String cleanseCommand(String dbCommand) {
       // strip off the delimiter
       if (dbCommand.endsWith(getDelimiter())) {
@@ -224,6 +251,12 @@ public class HiveSchemaHelper {
     @Override
     public String buildCommand(
       String scriptDir, String scriptFile) throws IllegalFormatException, IOException {
+      return buildCommand(scriptDir, scriptFile, false);
+    }
+
+    @Override
+    public String buildCommand(
+      String scriptDir, String scriptFile, boolean fixQuotes) throws IllegalFormatException, IOException {
       BufferedReader bfReader =
           new BufferedReader(new FileReader(scriptDir + File.separatorChar + scriptFile));
       String currLine;
@@ -231,6 +264,11 @@ public class HiveSchemaHelper {
       String currentCommand = null;
       while ((currLine = bfReader.readLine()) != null) {
         currLine = currLine.trim();
+
+        if (fixQuotes && !getQuoteCharacter().equals(DEFAULT_QUOTE)) {
+          currLine = currLine.replace("\\\"", getQuoteCharacter());
+        }
+
         if (currLine.isEmpty()) {
           continue; // skip empty lines
         }
@@ -319,11 +357,46 @@ public class HiveSchemaHelper {
     }
   }
 
+  // Hive commandline parser
+  public static class HiveCommandParser extends AbstractCommandParser {
+    private static String HIVE_NESTING_TOKEN = "SOURCE";
+    private final NestedScriptParser nestedDbCommandParser;
+
+    public HiveCommandParser(String dbOpts, String msUsername, String msPassword,
+        HiveConf hiveConf, String metaDbType) {
+      super(dbOpts, msUsername, msPassword, hiveConf);
+      nestedDbCommandParser = getDbCommandParser(metaDbType);
+    }
+
+    @Override
+    public String getQuoteCharacter() {
+      return nestedDbCommandParser.getQuoteCharacter();
+    }
+
+    @Override
+    public String getScriptName(String dbCommand) throws IllegalArgumentException {
+
+      if (!isNestedScript(dbCommand)) {
+        throw new IllegalArgumentException("Not a script format " + dbCommand);
+      }
+      String[] tokens = dbCommand.split(" ");
+      if (tokens.length != 2) {
+        throw new IllegalArgumentException("Couldn't parse line " + dbCommand);
+      }
+      return tokens[1].replace(";", "");
+    }
+
+    @Override
+    public boolean isNestedScript(String dbCommand) {
+     return dbCommand.startsWith(HIVE_NESTING_TOKEN);
+    }
+  }
+
   // MySQL parser
   public static class MySqlCommandParser extends AbstractCommandParser {
     private static final String MYSQL_NESTING_TOKEN = "SOURCE";
     private static final String DELIMITER_TOKEN = "DELIMITER";
-    private String delimiter = DEFAUTL_DELIMITER;
+    private String delimiter = DEFAULT_DELIMITER;
 
     public MySqlCommandParser(String dbOpts, String msUsername, String msPassword,
         HiveConf hiveConf) {
@@ -365,6 +438,11 @@ public class HiveSchemaHelper {
     }
 
     @Override
+    public String getQuoteCharacter() {
+      return "`";
+    }
+
+    @Override
     public boolean isNonExecCommand(String dbCommand) {
       return super.isNonExecCommand(dbCommand) ||
           (dbCommand.startsWith("/*") && dbCommand.endsWith("*/")) ||
@@ -474,14 +552,20 @@ public class HiveSchemaHelper {
   }
 
   public static NestedScriptParser getDbCommandParser(String dbName) {
-    return getDbCommandParser(dbName, null, null, null, null);
+    return getDbCommandParser(dbName, null);
+  }
+
+  public static NestedScriptParser getDbCommandParser(String dbName, String metaDbName) {
+    return getDbCommandParser(dbName, null, null, null, null, metaDbName);
   }
 
   public static NestedScriptParser getDbCommandParser(String dbName,
       String dbOpts, String msUsername, String msPassword,
-      HiveConf hiveConf) {
+      HiveConf hiveConf, String metaDbType) {
     if (dbName.equalsIgnoreCase(DB_DERBY)) {
       return new DerbyCommandParser(dbOpts, msUsername, msPassword, hiveConf);
+    } else if (dbName.equalsIgnoreCase(DB_HIVE)) {
+      return new HiveCommandParser(dbOpts, msUsername, msPassword, hiveConf, metaDbType);
     } else if (dbName.equalsIgnoreCase(DB_MSSQL)) {
       return new MSSQLCommandParser(dbOpts, msUsername, msPassword, hiveConf);
     } else if (dbName.equalsIgnoreCase(DB_MYSQL)) {
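
An illustrative sketch, not taken from the patch, of the quote substitution that the new getQuoteCharacter()/fixQuotes plumbing enables: when the Hive system-catalog scripts are expanded for a given metastore RDBMS, escaped double quotes in the script source are rewritten to that database's identifier quote (e.g. a backtick for MySQL).

    // Sketch of the fixQuotes step: the scripts are written with \" as the
    // identifier quote; for a MySQL-backed metastore the nested parser reports
    // ` as its quote character, so each line is rewritten accordingly.
    public class QuoteFixSketch {
      static String fixQuotes(String line, String quoteCharacter) {
        return line.replace("\\\"", quoteCharacter);
      }

      public static void main(String[] args) {
        String scripted = "CREATE TABLE IF NOT EXISTS \\\"TBLS\\\" (...)";
        System.out.println(fixQuotes(scripted, "`"));   // CREATE TABLE IF NOT EXISTS `TBLS` (...)
        System.out.println(fixQuotes(scripted, "\""));  // CREATE TABLE IF NOT EXISTS "TBLS" (...)
      }
    }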

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 7dd4d5f..7ab927a 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -71,24 +71,28 @@ public class HiveSchemaTool {
   private boolean dryRun = false;
   private boolean verbose = false;
   private String dbOpts = null;
+  private String url = null;
+  private String driver = null;
   private URI[] validationServers = null; // The list of servers the database/partition/table can locate on
   private final HiveConf hiveConf;
   private final String dbType;
+  private final String metaDbType;
   private final MetaStoreSchemaInfo metaStoreSchemaInfo;
 
   static final private Logger LOG = LoggerFactory.getLogger(HiveSchemaTool.class.getName());
 
-  public HiveSchemaTool(String dbType) throws HiveMetaException {
-    this(System.getenv("HIVE_HOME"), new HiveConf(HiveSchemaTool.class), dbType);
+  public HiveSchemaTool(String dbType, String metaDbType) throws HiveMetaException {
+    this(System.getenv("HIVE_HOME"), new HiveConf(HiveSchemaTool.class), dbType, metaDbType);
   }
 
-  public HiveSchemaTool(String hiveHome, HiveConf hiveConf, String dbType)
+  public HiveSchemaTool(String hiveHome, HiveConf hiveConf, String dbType, String metaDbType)
       throws HiveMetaException {
     if (hiveHome == null || hiveHome.isEmpty()) {
       throw new HiveMetaException("No Hive home directory provided");
     }
     this.hiveConf = hiveConf;
     this.dbType = dbType;
+    this.metaDbType = metaDbType;
     this.metaStoreSchemaInfo = new MetaStoreSchemaInfo(hiveHome, dbType);
   }
 
@@ -96,6 +100,14 @@ public class HiveSchemaTool {
     return hiveConf;
   }
 
+  public void setUrl(String url) {
+    this.url = url;
+  }
+
+  public void setDriver(String driver) {
+    this.driver = driver;
+  }
+
   public void setUserName(String userName) {
     this.userName = userName;
   }
@@ -135,12 +147,17 @@ public class HiveSchemaTool {
   Connection getConnectionToMetastore(boolean printInfo)
       throws HiveMetaException {
     return HiveSchemaHelper.getConnectionToMetastore(userName,
-        passWord, printInfo, hiveConf);
+        passWord, url, driver, printInfo, hiveConf);
+  }
+
+  private NestedScriptParser getDbCommandParser(String dbType, String metaDbType) {
+    return HiveSchemaHelper.getDbCommandParser(dbType, dbOpts, userName,
+	passWord, hiveConf, metaDbType);
   }
 
   private NestedScriptParser getDbCommandParser(String dbType) {
     return HiveSchemaHelper.getDbCommandParser(dbType, dbOpts, userName,
-        passWord, hiveConf);
+	passWord, hiveConf, null);
   }
 
   /***
@@ -936,9 +953,14 @@ public class HiveSchemaTool {
    */
   private void runBeeLine(String scriptDir, String scriptFile)
       throws IOException, HiveMetaException {
-    NestedScriptParser dbCommandParser = getDbCommandParser(dbType);
+    NestedScriptParser dbCommandParser = getDbCommandParser(dbType, metaDbType);
+
     // expand the nested script
-    String sqlCommands = dbCommandParser.buildCommand(scriptDir, scriptFile);
+    // If the metaDbType is set, this is setting up the information
+    // schema in Hive. That specifically means that the sql commands need
+    // to be adjusted for the underlying RDBMS (correct quotation
+    // strings, etc).
+    String sqlCommands = dbCommandParser.buildCommand(scriptDir, scriptFile, metaDbType != null);
     File tmpFile = File.createTempFile("schematool", ".sql");
     tmpFile.deleteOnExit();
 
@@ -954,7 +976,8 @@ public class HiveSchemaTool {
 
   // Generate the beeline args per hive conf and execute the given script
   public void runBeeLine(String sqlScriptFile) throws IOException {
-    CommandBuilder builder = new CommandBuilder(hiveConf, userName, passWord, sqlScriptFile);
+    CommandBuilder builder = new CommandBuilder(hiveConf, url, driver,
+        userName, passWord, sqlScriptFile);
 
     // run the script using Beeline
     try (BeeLine beeLine = new BeeLine()) {
@@ -980,11 +1003,16 @@ public class HiveSchemaTool {
     private final String userName;
     private final String password;
     private final String sqlScriptFile;
+    private final String driver;
+    private final String url;
 
-    CommandBuilder(HiveConf hiveConf, String userName, String password, String sqlScriptFile) {
+    CommandBuilder(HiveConf hiveConf, String url, String driver,
+        String userName, String password, String sqlScriptFile) {
       this.hiveConf = hiveConf;
       this.userName = userName;
       this.password = password;
+      this.url = url;
+      this.driver = driver;
       this.sqlScriptFile = sqlScriptFile;
     }
 
@@ -998,10 +1026,14 @@ public class HiveSchemaTool {
     }
 
     private String[] argsWith(String password) throws IOException {
-      return new String[] { "-u",
-          HiveSchemaHelper.getValidConfVar(ConfVars.METASTORECONNECTURLKEY, hiveConf), "-d",
-          HiveSchemaHelper.getValidConfVar(ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf), "-n",
-          userName, "-p", password, "-f", sqlScriptFile };
+      return new String[]
+        {
+          "-u", url == null ? HiveSchemaHelper.getValidConfVar(ConfVars.METASTORECONNECTURLKEY, hiveConf) : url,
+          "-d", driver == null ? HiveSchemaHelper.getValidConfVar(ConfVars.METASTORE_CONNECTION_DRIVER, hiveConf) : driver,
+          "-n", userName,
+          "-p", password,
+          "-f", sqlScriptFile
+        };
     }
 
     private void logScript() throws IOException {
@@ -1049,6 +1081,15 @@ public class HiveSchemaTool {
     Option dbTypeOpt = OptionBuilder.withArgName("databaseType")
                 .hasArgs().withDescription("Metastore database type")
                 .create("dbType");
+    Option metaDbTypeOpt = OptionBuilder.withArgName("metaDatabaseType")
+                .hasArgs().withDescription("Used only if upgrading the system catalog for hive")
+                .create("metaDbType");
+    Option urlOpt = OptionBuilder.withArgName("url")
+                .hasArgs().withDescription("connection url to the database")
+                .create("url");
+    Option driverOpt = OptionBuilder.withArgName("driver")
+                .hasArgs().withDescription("driver name for connection")
+                .create("driver");
     Option dbOpts = OptionBuilder.withArgName("databaseOpts")
                 .hasArgs().withDescription("Backend DB specific options")
                 .create("dbOpts");
@@ -1063,6 +1104,9 @@ public class HiveSchemaTool {
     cmdLineOptions.addOption(passwdOpt);
     cmdLineOptions.addOption(dbTypeOpt);
     cmdLineOptions.addOption(verboseOpt);
+    cmdLineOptions.addOption(metaDbTypeOpt);
+    cmdLineOptions.addOption(urlOpt);
+    cmdLineOptions.addOption(driverOpt);
     cmdLineOptions.addOption(dbOpts);
     cmdLineOptions.addOption(serversOpt);
     cmdLineOptions.addOptionGroup(optGroup);
@@ -1072,6 +1116,7 @@ public class HiveSchemaTool {
     CommandLineParser parser = new GnuParser();
     CommandLine line = null;
     String dbType = null;
+    String metaDbType = null;
     String schemaVer = null;
     Options cmdLineOptions = new Options();
 
@@ -1093,6 +1138,7 @@ public class HiveSchemaTool {
     if (line.hasOption("dbType")) {
       dbType = line.getOptionValue("dbType");
       if ((!dbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY) &&
+          !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE) &&
           !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL) &&
           !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL) &&
           !dbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE) && !dbType
@@ -1105,9 +1151,31 @@ public class HiveSchemaTool {
       printAndExit(cmdLineOptions);
     }
 
+    if (line.hasOption("metaDbType")) {
+      metaDbType = line.getOptionValue("metaDbType");
+
+      if (!dbType.equals(HiveSchemaHelper.DB_HIVE)) {
+        System.err.println("metaDbType only supported for dbType = hive");
+        printAndExit(cmdLineOptions);
+      }
+
+      if (!metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_DERBY) &&
+          !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_MSSQL) &&
+          !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_MYSQL) &&
+          !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_POSTGRACE) &&
+          !metaDbType.equalsIgnoreCase(HiveSchemaHelper.DB_ORACLE)) {
+        System.err.println("Unsupported metaDbType " + metaDbType);
+        printAndExit(cmdLineOptions);
+      }
+    } else if (dbType.equalsIgnoreCase(HiveSchemaHelper.DB_HIVE)) {
+      System.err.println("no metaDbType supplied");
+      printAndExit(cmdLineOptions);
+    }
+
+
     System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "true");
     try {
-      HiveSchemaTool schemaTool = new HiveSchemaTool(dbType);
+      HiveSchemaTool schemaTool = new HiveSchemaTool(dbType, metaDbType);
 
       if (line.hasOption("userName")) {
         schemaTool.setUserName(line.getOptionValue("userName"));
@@ -1125,6 +1193,12 @@ public class HiveSchemaTool {
           throw new HiveMetaException("Error getting metastore password", err);
         }
       }
+      if (line.hasOption("url")) {
+        schemaTool.setUrl(line.getOptionValue("url"));
+      }
+      if (line.hasOption("driver")) {
+        schemaTool.setDriver(line.getOptionValue("driver"));
+      }
       if (line.hasOption("dryRun")) {
         schemaTool.setDryRun(true);
       }
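
A hypothetical wiring sketch of the new dbType/metaDbType pair together with the url and driver setters introduced above; the URL, driver class and credentials below are placeholder assumptions, not values from the patch.

    // Hypothetical usage of the API added by this patch. Assumes HIVE_HOME is
    // set and the hive jars are on the classpath; the URL, driver and
    // credentials are placeholders.
    import org.apache.hive.beeline.HiveSchemaTool;

    public class SchemaToolSketch {
      public static void main(String[] args) throws Exception {
        // dbType "hive" targets the information schema; metaDbType names the
        // RDBMS that actually backs the metastore (e.g. "mysql").
        HiveSchemaTool tool = new HiveSchemaTool("hive", "mysql");
        tool.setUrl("jdbc:hive2://localhost:10000/default");  // assumed HiveServer2 URL
        tool.setDriver("org.apache.hive.jdbc.HiveDriver");    // assumed JDBC driver
        tool.setUserName("hive");
        tool.setPassWord("");
        // schema initialization/upgrade entry points are unchanged and omitted here
      }
    }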

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/beeline/src/test/org/apache/hive/beeline/TestHiveSchemaTool.java
----------------------------------------------------------------------
diff --git a/beeline/src/test/org/apache/hive/beeline/TestHiveSchemaTool.java b/beeline/src/test/org/apache/hive/beeline/TestHiveSchemaTool.java
index 4cd5124..716bce7 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestHiveSchemaTool.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestHiveSchemaTool.java
@@ -64,7 +64,7 @@ public class TestHiveSchemaTool {
     if (!file.exists()) {
       file.createNewFile();
     }
-    builder = new HiveSchemaTool.CommandBuilder(hiveConf, "testUser", pasword, scriptFile);
+    builder = new HiveSchemaTool.CommandBuilder(hiveConf, null, null, "testUser", pasword, scriptFile);
   }
 
   @After
@@ -87,4 +87,4 @@ public class TestHiveSchemaTool {
     String[] strings = builder.buildToRun();
     assertTrue(Arrays.asList(strings).contains(pasword));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index d6a80ae..6068f0d 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -597,6 +597,8 @@ public class HiveConf extends Configuration {
     HADOOPNUMREDUCERS("mapreduce.job.reduces", -1, "", true),
 
     // Metastore stuff. Be sure to update HiveConf.metaVars when you add something here!
+    METASTOREDBTYPE("hive.metastore.db.type", "DERBY", new StringSet("DERBY", "ORACLE", "MYSQL", "MSSQL", "POSTGRES"),
+        "Type of database used by the metastore. Information schema & JDBCStorageHandler depend on it."),
     METASTOREWAREHOUSE("hive.metastore.warehouse.dir", "/user/hive/warehouse",
         "location of default database for the warehouse"),
     METASTOREURIS("hive.metastore.uris", "",
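
A small sketch of reading the knob added above through the standard HiveConf accessors; only the property name and its value set come from the hunk, the rest is illustrative.

    // Illustrative only: set and read hive.metastore.db.type via HiveConf.
    import org.apache.hadoop.hive.conf.HiveConf;

    public class MetastoreDbTypeSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // one of DERBY, ORACLE, MYSQL, MSSQL, POSTGRES per the new StringSet
        conf.setVar(HiveConf.ConfVars.METASTOREDBTYPE, "MYSQL");
        System.out.println(conf.getVar(HiveConf.ConfVars.METASTOREDBTYPE));
      }
    }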

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index 4510db3..4ed4df1 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -200,6 +200,12 @@ public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStor
   }
 
   @Override
+  public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> jobSecrets
+  ) {
+
+  }
+
+  @Override
   public void preCreateTable(Table table) throws MetaException {
     // Do safety checks
     if (MetaStoreUtils.isExternalTable(table) && !StringUtils

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
index 586fc72..438a7d6 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
@@ -57,7 +57,7 @@ public class TestSchemaTool extends TestCase {
         "jdbc:derby:" + testMetastoreDB + ";create=true");
     hiveConf = new HiveConf(this.getClass());
     schemaTool = new HiveSchemaTool(
-        System.getProperty("test.tmp.dir", "target/tmp"), hiveConf, "derby");
+        System.getProperty("test.tmp.dir", "target/tmp"), hiveConf, "derby", null);
     schemaTool.setUserName(
         schemaTool.getHiveConf().get(HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME.varname));
     schemaTool.setPassWord(ShimLoader.getHadoopShims().getPassword(schemaTool.getHiveConf(),
@@ -590,7 +590,7 @@ public class TestSchemaTool extends TestCase {
     NestedScriptParser dbOptParser = HiveSchemaHelper.getDbCommandParser(
         "postgres",
         PostgresCommandParser.POSTGRES_SKIP_STANDARD_STRINGS_DBOPT,
-        null, null, null);
+        null, null, null, null);
     expectedSQL = StringUtils.join(
         expectedScriptWithOptionAbsent, System.getProperty("line.separator")) +
             System.getProperty("line.separator");

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index e2c3992..a378a5d 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -583,6 +583,7 @@ minillaplocal.query.files=acid_globallimit.q,\
   subquery_select.q, \
   subquery_shared_alias.q, \
   subquery_null_agg.q,\
+  sysdb.q,\
   table_access_keys_stats.q,\
   tez_bmj_schema_evolution.q,\
   tez_dml.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index bfa7a26..6def148 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -59,15 +59,23 @@ public class JdbcInputFormat extends HiveInputFormat<LongWritable, MapWritable>
   @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     try {
-      if (numSplits <= 0) {
-        numSplits = 1;
-      }
+
       LOGGER.debug("Creating {} input splits", numSplits);
+
       if (dbAccessor == null) {
         dbAccessor = DatabaseAccessorFactory.getAccessor(job);
       }
 
       int numRecords = dbAccessor.getTotalNumberOfRecords(job);
+
+      if (numRecords < numSplits) {
+        numSplits = numRecords;
+      }
+
+      if (numSplits <= 0) {
+        numSplits = 1;
+      }
+
       int numRecordsPerSplit = numRecords / numSplits;
       int numSplitsWithExtraRecords = numRecords % numSplits;
 
@@ -86,6 +94,7 @@ public class JdbcInputFormat extends HiveInputFormat<LongWritable, MapWritable>
         offset += numRecordsInThisSplit;
       }
 
+      dbAccessor = null;
       return splits;
     }
     catch (Exception e) {
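
The reordered checks above clamp the split count to the record count before dividing, so small or empty result sets no longer yield zero-sized splits. A standalone sketch of the arithmetic with made-up numbers; how the remainder records are spread over the first splits is an assumption, since that part of getSplits() is outside this hunk:

    public class SplitSizingSketch {
      public static void main(String[] args) {
        int numRecords = 10; // stand-in for dbAccessor.getTotalNumberOfRecords(job)
        int numSplits = 4;   // requested by the framework

        // Clamp exactly as the patch does.
        if (numRecords < numSplits) {
          numSplits = numRecords;
        }
        if (numSplits <= 0) {
          numSplits = 1;
        }

        int numRecordsPerSplit = numRecords / numSplits;        // 2
        int numSplitsWithExtraRecords = numRecords % numSplits; // 2

        int offset = 0;
        for (int i = 0; i < numSplits; i++) {
          // Assumption: the first numSplitsWithExtraRecords splits each take one extra record.
          int numRecordsInThisSplit = numRecordsPerSplit + (i < numSplitsWithExtraRecords ? 1 : 0);
          System.out.println("split " + i + ": offset=" + offset + ", records=" + numRecordsInThisSplit);
          offset += numRecordsInThisSplit;
        }
      }
    }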

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordReader.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordReader.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordReader.java
index 0a24bd9..8321a66 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordReader.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcRecordReader.java
@@ -15,6 +15,7 @@
 package org.apache.hive.storage.jdbc;
 
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
@@ -63,7 +64,8 @@ public class JdbcRecordReader implements RecordReader<LongWritable, MapWritable>
         Map<String, String> record = iterator.next();
         if ((record != null) && (!record.isEmpty())) {
           for (Entry<String, String> entry : record.entrySet()) {
-            value.put(new Text(entry.getKey()), new Text(entry.getValue()));
+            value.put(new Text(entry.getKey()),
+                entry.getValue() == null ? NullWritable.get() : new Text(entry.getValue()));
           }
           return true;
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
index f35c33d..e785e9c 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcSerDe.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -61,7 +62,6 @@ public class JdbcSerDe extends AbstractSerDe {
     try {
       LOGGER.debug("Initializing the SerDe");
 
-      // Hive cdh-4.3 does not provide the properties object on all calls
       if (tbl.containsKey(JdbcStorageConfig.DATABASE_TYPE.getPropertyName())) {
         Configuration tableConfig = JdbcStorageConfigManager.convertPropertiesToConfiguration(tbl);
 
@@ -126,7 +126,7 @@ public class JdbcSerDe extends AbstractSerDe {
     for (int i = 0; i < numColumns; i++) {
       columnKey.set(columnNames.get(i));
       Writable value = input.get(columnKey);
-      if (value == null) {
+      if (value == null || value instanceof NullWritable) {
         row.add(null);
       }
       else {
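
Together with the JdbcRecordIterator change further down, SQL NULLs now travel as NullWritable values in the MapWritable row instead of a placeholder string, and deserialize() turns either a missing key or a NullWritable back into a Java null. A self-contained sketch of that mapping; the column names and values are made up:

    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class NullColumnSketch {
      public static void main(String[] args) {
        MapWritable row = new MapWritable();
        row.put(new Text("name"), new Text("widget"));
        row.put(new Text("price"), NullWritable.get()); // SQL NULL column

        for (String column : new String[] {"name", "price", "missing"}) {
          Writable value = row.get(new Text(column));
          // Same condition as the deserialize() change above.
          Object cell = (value == null || value instanceof NullWritable) ? null : value.toString();
          System.out.println(column + " -> " + cell);
        }
      }
    }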

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
index 946ee0c..4b03285 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
@@ -24,14 +24,18 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hive.storage.jdbc.conf.JdbcStorageConfigManager;
 
+import java.lang.IllegalArgumentException;
 import java.util.Map;
 import java.util.Properties;
 
 public class JdbcStorageHandler implements HiveStorageHandler {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageHandler.class);
   private Configuration conf;
 
 
@@ -72,27 +76,44 @@ public class JdbcStorageHandler implements HiveStorageHandler {
     return null;
   }
 
-
   @Override
-  public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
-    Properties properties = tableDesc.getProperties();
-    JdbcStorageConfigManager.copyConfigurationToJob(properties, jobProperties);
+  public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
+    try {
+      LOGGER.debug("Adding properties to input job conf");
+      Properties properties = tableDesc.getProperties();
+      JdbcStorageConfigManager.copyConfigurationToJob(properties, jobProperties);
+    } catch (Exception e) {
+      throw new IllegalArgumentException(e);
+    }
   }
 
-
   @Override
-  public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
-    Properties properties = tableDesc.getProperties();
-    JdbcStorageConfigManager.copyConfigurationToJob(properties, jobProperties);
+  public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> jobSecrets) {
+    try {
+      LOGGER.debug("Adding secrets to input job conf");
+      Properties properties = tableDesc.getProperties();
+      JdbcStorageConfigManager.copySecretsToJob(properties, jobSecrets);
+    } catch (Exception e) {
+      throw new IllegalArgumentException(e);
+    }
   }
 
+  @Override
+  public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
+    try {
+      LOGGER.debug("Adding properties to input job conf");
+      Properties properties = tableDesc.getProperties();
+      JdbcStorageConfigManager.copyConfigurationToJob(properties, jobProperties);
+    } catch (Exception e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
 
   @Override
   public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
     // Nothing to do here...
   }
 
-
   @Override
   public HiveAuthorizationProvider getAuthorizationProvider() throws HiveException {
     return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/DatabaseType.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/DatabaseType.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/DatabaseType.java
index a2bdbe4..c4e97ba 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/DatabaseType.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/DatabaseType.java
@@ -17,5 +17,9 @@ package org.apache.hive.storage.jdbc.conf;
 public enum DatabaseType {
   MYSQL,
   H2,
-  DERBY
+  DERBY,
+  ORACLE,
+  POSTGRES,
+  MSSQL,
+  METASTORE
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
index 5267cda..350b0c6 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
@@ -14,10 +14,20 @@
  */
 package org.apache.hive.storage.jdbc.conf;
 
+import java.io.IOException;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.storage.jdbc.conf.DatabaseType;
+
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hive.storage.jdbc.QueryConditionBuilder;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.EnumSet;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -28,28 +38,48 @@ import java.util.Properties;
  */
 public class JdbcStorageConfigManager {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageConfigManager.class);
   public static final String CONFIG_PREFIX = "hive.sql";
+  public static final String CONFIG_PWD = CONFIG_PREFIX + ".dbcp.password";
+  public static final String CONFIG_USERNAME = CONFIG_PREFIX + ".dbcp.username";
   private static final EnumSet<JdbcStorageConfig> DEFAULT_REQUIRED_PROPERTIES =
     EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
         JdbcStorageConfig.JDBC_URL,
         JdbcStorageConfig.JDBC_DRIVER_CLASS,
         JdbcStorageConfig.QUERY);
 
+  private static final EnumSet<JdbcStorageConfig> METASTORE_REQUIRED_PROPERTIES =
+    EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
+        JdbcStorageConfig.QUERY);
 
   private JdbcStorageConfigManager() {
   }
 
-
-  public static void copyConfigurationToJob(Properties props, Map<String, String> jobProps) {
+  public static void copyConfigurationToJob(Properties props, Map<String, String> jobProps)
+    throws HiveException, IOException {
     checkRequiredPropertiesAreDefined(props);
+    resolveMetadata(props);
     for (Entry<Object, Object> entry : props.entrySet()) {
-      jobProps.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
+      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD)) {
+        jobProps.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
+      }
     }
   }
 
+  public static void copySecretsToJob(Properties props, Map<String, String> jobSecrets)
+    throws HiveException, IOException {
+    checkRequiredPropertiesAreDefined(props);
+    resolveMetadata(props);
+    String secret = props.getProperty(CONFIG_PWD);
+    if (secret != null) {
+      jobSecrets.put(CONFIG_PWD, secret);
+    }
+  }
 
-  public static Configuration convertPropertiesToConfiguration(Properties props) {
+  public static Configuration convertPropertiesToConfiguration(Properties props)
+    throws HiveException, IOException {
     checkRequiredPropertiesAreDefined(props);
+    resolveMetadata(props);
     Configuration conf = new Configuration();
 
     for (Entry<Object, Object> entry : props.entrySet()) {
@@ -61,14 +91,23 @@ public class JdbcStorageConfigManager {
 
 
   private static void checkRequiredPropertiesAreDefined(Properties props) {
-    for (JdbcStorageConfig configKey : DEFAULT_REQUIRED_PROPERTIES) {
+    DatabaseType dbType = null;
+
+    try {
+      String dbTypeName = props.getProperty(JdbcStorageConfig.DATABASE_TYPE.getPropertyName());
+      dbType = DatabaseType.valueOf(dbTypeName);
+    } catch (Exception e) {
+      throw new IllegalArgumentException("Unknown database type.", e);
+    }
+
+    for (JdbcStorageConfig configKey : (DatabaseType.METASTORE.equals(dbType)
+            ? METASTORE_REQUIRED_PROPERTIES : DEFAULT_REQUIRED_PROPERTIES)) {
       String propertyKey = configKey.getPropertyName();
       if ((props == null) || (!props.containsKey(propertyKey)) || (isEmptyString(props.getProperty(propertyKey)))) {
         throw new IllegalArgumentException("Property " + propertyKey + " is required.");
       }
     }
 
-    DatabaseType dbType = DatabaseType.valueOf(props.getProperty(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()));
     CustomConfigManager configManager = CustomConfigManagerFactory.getCustomConfigManagerFor(dbType);
     configManager.checkRequiredProperties(props);
   }
@@ -94,4 +133,51 @@ public class JdbcStorageConfigManager {
     return ((value == null) || (value.trim().isEmpty()));
   }
 
+  private static void resolveMetadata(Properties props) throws HiveException, IOException {
+    DatabaseType dbType = DatabaseType.valueOf(
+      props.getProperty(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()));
+
+    LOGGER.debug("Resolving db type: {}", dbType.toString());
+
+    if (dbType == DatabaseType.METASTORE) {
+      HiveConf hconf = Hive.get().getConf();
+      props.setProperty(JdbcStorageConfig.JDBC_URL.getPropertyName(),
+          getMetastoreConnectionURL(hconf));
+      props.setProperty(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(),
+          getMetastoreDriver(hconf));
+
+      String user = getMetastoreJdbcUser(hconf);
+      if (user != null) {
+        props.setProperty(CONFIG_USERNAME, user);
+      }
+
+      String pwd = getMetastoreJdbcPasswd(hconf);
+      if (pwd != null) {
+        props.setProperty(CONFIG_PWD, pwd);
+      }
+      props.setProperty(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(),
+          getMetastoreDatabaseType(hconf));
+    }
+  }
+
+  private static String getMetastoreDatabaseType(HiveConf conf) {
+    return conf.getVar(HiveConf.ConfVars.METASTOREDBTYPE);
+  }
+
+  private static String getMetastoreConnectionURL(HiveConf conf) {
+    return conf.getVar(HiveConf.ConfVars.METASTORECONNECTURLKEY);
+  }
+
+  private static String getMetastoreDriver(HiveConf conf) {
+    return conf.getVar(HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER);
+  }
+
+  private static String getMetastoreJdbcUser(HiveConf conf) {
+    return conf.getVar(HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME);
+  }
+
+  private static String getMetastoreJdbcPasswd(HiveConf conf) throws IOException {
+    return ShimLoader.getHadoopShims().getPassword(conf,
+        HiveConf.ConfVars.METASTOREPWD.varname);
+  }
 }
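
The net effect is that the DBCP password never rides along with the ordinary job properties: copyConfigurationToJob() skips it, and copySecretsToJob() hands it to the credentials path instead. A rough usage sketch with made-up connection settings; apart from hive.sql.database.type, hive.sql.query and the CONFIG_USERNAME / CONFIG_PWD constants, the property keys are assumptions about the jdbc-handler's configuration names:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    import org.apache.hive.storage.jdbc.conf.JdbcStorageConfigManager;

    public class SecretSeparationSketch {
      public static void main(String[] args) throws Exception {
        Properties tableProps = new Properties();
        tableProps.setProperty("hive.sql.database.type", "MYSQL");
        tableProps.setProperty("hive.sql.jdbc.url", "jdbc:mysql://localhost:3306/hive"); // assumed key
        tableProps.setProperty("hive.sql.jdbc.driver", "com.mysql.jdbc.Driver");         // assumed key
        tableProps.setProperty("hive.sql.query", "SELECT 1");
        tableProps.setProperty(JdbcStorageConfigManager.CONFIG_USERNAME, "hiveuser");
        tableProps.setProperty(JdbcStorageConfigManager.CONFIG_PWD, "secret");

        Map<String, String> jobProps = new HashMap<>();
        Map<String, String> jobSecrets = new HashMap<>();
        JdbcStorageConfigManager.copyConfigurationToJob(tableProps, jobProps);
        JdbcStorageConfigManager.copySecretsToJob(tableProps, jobSecrets);

        // Only the secrets map should contain the password.
        System.out.println("password in jobProps:   " + jobProps.containsKey(JdbcStorageConfigManager.CONFIG_PWD));
        System.out.println("password in jobSecrets: " + jobSecrets.containsKey(JdbcStorageConfigManager.CONFIG_PWD));
      }
    }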

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index b655aec..178c97d 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -16,6 +16,10 @@ package org.apache.hive.storage.jdbc.dao;
 
 import org.apache.commons.dbcp.BasicDataSourceFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,6 +49,7 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
   protected static final int DEFAULT_FETCH_SIZE = 1000;
   protected static final Logger LOGGER = LoggerFactory.getLogger(GenericJdbcDatabaseAccessor.class);
   protected DataSource dbcpDataSource = null;
+  protected static final Text DBCP_PWD = new Text(DBCP_CONFIG_PREFIX + ".password");
 
 
   public GenericJdbcDatabaseAccessor() {
@@ -97,7 +102,7 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
       initializeDatabaseConnection(conf);
       String sql = JdbcStorageConfigManager.getQueryToExecute(conf);
       String countQuery = "SELECT COUNT(*) FROM (" + sql + ") tmptable";
-      LOGGER.debug("Query to execute is [{}]", countQuery);
+      LOGGER.info("Query to execute is [{}]", countQuery);
 
       conn = dbcpDataSource.getConnection();
       ps = conn.prepareStatement(countQuery);
@@ -135,7 +140,7 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
       initializeDatabaseConnection(conf);
       String sql = JdbcStorageConfigManager.getQueryToExecute(conf);
       String limitQuery = addLimitAndOffsetToQuery(sql, limit, offset);
-      LOGGER.debug("Query to execute is [{}]", limitQuery);
+      LOGGER.info("Query to execute is [{}]", limitQuery);
 
       conn = dbcpDataSource.getConnection();
       ps = conn.prepareStatement(limitQuery, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
@@ -216,7 +221,7 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
   }
 
 
-  protected Properties getConnectionPoolProperties(Configuration conf) {
+  protected Properties getConnectionPoolProperties(Configuration conf) throws Exception {
     // Create the default properties object
     Properties dbProperties = getDefaultDBCPProperties();
 
@@ -228,6 +233,13 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
       }
     }
 
+    // handle password
+    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
+    if (credentials.getSecretKey(DBCP_PWD) != null) {
+      LOGGER.info("found token in credentials");
+      dbProperties.put(DBCP_PWD, new String(credentials.getSecretKey(DBCP_PWD)));
+    }
+
     // essential properties that shouldn't be overridden by users
     dbProperties.put("url", conf.get(JdbcStorageConfig.JDBC_URL.getPropertyName()));
     dbProperties.put("driverClassName", conf.get(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName()));

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java
index 4262502..8938766 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java
@@ -66,9 +66,6 @@ public class JdbcRecordIterator implements Iterator<Map<String, String>> {
       for (int i = 0; i < numColumns; i++) {
         String key = metadata.getColumnName(i + 1);
         String value = rs.getString(i + 1);
-        if (value == null) {
-          value = NullWritable.get().toString();
-        }
         record.put(key, value);
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MySqlDatabaseAccessor.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MySqlDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MySqlDatabaseAccessor.java
index 7d821d8..86fde7c 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MySqlDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MySqlDatabaseAccessor.java
@@ -26,7 +26,7 @@ public class MySqlDatabaseAccessor extends GenericJdbcDatabaseAccessor {
       return addLimitToQuery(sql, limit);
     }
     else {
-      return sql + " LIMIT " + limit + "," + offset;
+      return sql + " LIMIT " + offset + "," + limit;
     }
   }
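
The one-line fix above matters because MySQL's two-argument LIMIT clause takes the offset first and the row count second; the old order silently paged through the wrong rows. A tiny sketch of the corrected query string for a hypothetical page:

    public class MySqlLimitSketch {
      public static void main(String[] args) {
        String sql = "SELECT name FROM sometable"; // hypothetical base query
        int limit = 10;   // rows per page
        int offset = 40;  // rows already consumed

        // LIMIT <offset>,<row_count> returns rows 41..50 of the result.
        String paged = sql + " LIMIT " + offset + "," + limit;
        System.out.println(paged);
      }
    }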
 

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java b/jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java
index c950831..800172c 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/config/JdbcStorageConfigManagerTest.java
@@ -20,6 +20,7 @@ import static org.hamcrest.Matchers.notNullValue;
 import static org.junit.Assert.assertThat;
 
 import org.junit.Test;
+import org.junit.Ignore;
 
 import org.apache.hive.storage.jdbc.conf.DatabaseType;
 import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig;
@@ -32,7 +33,7 @@ import java.util.Properties;
 public class JdbcStorageConfigManagerTest {
 
   @Test
-  public void testWithAllRequiredSettingsDefined() {
+  public void testWithAllRequiredSettingsDefined() throws Exception {
     Properties props = new Properties();
     props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString());
     props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive");
@@ -51,8 +52,9 @@ public class JdbcStorageConfigManagerTest {
   }
 
 
-  @Test(expected = IllegalArgumentException.class)
-  public void testWithJdbcUrlMissing() {
+  // Since metastore connections don't require the URL, a missing URL is now allowable.
+  @Ignore @Test(expected = IllegalArgumentException.class)
+  public void testWithJdbcUrlMissing() throws Exception {
     Properties props = new Properties();
     props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), DatabaseType.MYSQL.toString());
     props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable");
@@ -63,7 +65,7 @@ public class JdbcStorageConfigManagerTest {
 
 
   @Test(expected = IllegalArgumentException.class)
-  public void testWithDatabaseTypeMissing() {
+  public void testWithDatabaseTypeMissing() throws Exception {
     Properties props = new Properties();
     props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive");
     props.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable");
@@ -74,7 +76,7 @@ public class JdbcStorageConfigManagerTest {
 
 
   @Test(expected = IllegalArgumentException.class)
-  public void testWithUnknownDatabaseType() {
+  public void testWithUnknownDatabaseType() throws Exception {
     Properties props = new Properties();
     props.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "Postgres");
     props.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive");

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index 6bf9550..4d30296 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -89,7 +89,9 @@ public class LlapServiceDriver {
   protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName());
 
   private static final String[] DEFAULT_AUX_CLASSES = new String[] {
-  "org.apache.hive.hcatalog.data.JsonSerDe","org.apache.hadoop.hive.druid.DruidStorageHandler" };
+    "org.apache.hive.hcatalog.data.JsonSerDe","org.apache.hadoop.hive.druid.DruidStorageHandler",
+    "org.apache.hive.storage.jdbc.JdbcStorageHandler"
+  };
   private static final String HBASE_SERDE_CLASS = "org.apache.hadoop.hive.hbase.HBaseSerDe";
   private static final String[] NEEDED_CONFIGS = LlapDaemonConfiguration.DAEMON_CONFIGS;
   private static final String[] OPTIONAL_CONFIGS = LlapDaemonConfiguration.SSL_DAEMON_CONFIGS;

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
new file mode 100644
index 0000000..a70884c
--- /dev/null
+++ b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
@@ -0,0 +1,1223 @@
+-- HIVE system db
+
+CREATE DATABASE SYS;
+
+USE SYS;
+
+CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` (
+  `SD_ID` bigint,
+  `BUCKET_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_BUCKETING_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"BUCKET_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"BUCKETING_COLS\""
+);
+
+CREATE TABLE IF NOT EXISTS `CDS` (
+  `CD_ID` bigint,
+  CONSTRAINT `SYS_PK_CDS` PRIMARY KEY (`CD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\"
+FROM
+  \"CDS\""
+);
+
+CREATE TABLE IF NOT EXISTS `COLUMNS_V2` (
+  `CD_ID` bigint,
+  `COMMENT` string,
+  `COLUMN_NAME` string,
+  `TYPE_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_COLUMN_V2` PRIMARY KEY (`CD_ID`,`COLUMN_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CD_ID\",
+  \"COMMENT\",
+  \"COLUMN_NAME\",
+  \"TYPE_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"COLUMNS_V2\""
+);
+
+CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` (
+  `DB_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_DATABASE_PARAMS` PRIMARY KEY (`DB_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"DATABASE_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `DBS` (
+  `DB_ID` bigint,
+  `DB_LOCATION_URI` string,
+  `NAME` string,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_DBS` PRIMARY KEY (`DB_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_ID\",
+  \"DB_LOCATION_URI\",
+  \"NAME\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  DBS"
+);
+
+CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
+  `DB_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `DB_PRIV` string,
+  CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"DB_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"DB_PRIV\"
+FROM
+  \"DB_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
+  `USER_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` string,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `USER_PRIV` string,
+  CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"USER_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"USER_PRIV\"
+FROM
+  \"GLOBAL_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `IDXS` (
+  `INDEX_ID` bigint,
+  `CREATE_TIME` int,
+  `DEFERRED_REBUILD` boolean,
+  `INDEX_HANDLER_CLASS` string,
+  `INDEX_NAME` string,
+  `INDEX_TBL_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `ORIG_TBL_ID` bigint,
+  `SD_ID` bigint,
+  CONSTRAINT `SYS_PK_IDXS` PRIMARY KEY (`INDEX_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"CREATE_TIME\",
+  \"DEFERRED_REBUILD\",
+  \"INDEX_HANDLER_CLASS\",
+  \"INDEX_NAME\",
+  \"INDEX_TBL_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"ORIG_TBL_ID\",
+  \"SD_ID\"
+FROM
+  \"IDXS\""
+);
+
+CREATE TABLE IF NOT EXISTS `INDEX_PARAMS` (
+  `INDEX_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_INDEX_PARAMS` PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"INDEX_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"INDEX_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PARTITIONS` (
+  `PART_ID` bigint,
+  `CREATE_TIME` int,
+  `LAST_ACCESS_TIME` int,
+  `PART_NAME` string,
+  `SD_ID` bigint,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_PARTITIONS` PRIMARY KEY (`PART_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"CREATE_TIME\",
+  \"LAST_ACCESS_TIME\",
+  \"PART_NAME\",
+  \"SD_ID\",
+  \"TBL_ID\"
+FROM
+  \"PARTITIONS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PARTITION_KEYS` (
+  `TBL_ID` bigint,
+  `PKEY_COMMENT` string,
+  `PKEY_NAME` string,
+  `PKEY_TYPE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEYS` PRIMARY KEY (`TBL_ID`,`PKEY_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PKEY_COMMENT\",
+  \"PKEY_NAME\",
+  \"PKEY_TYPE\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEYS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
+  `PART_ID` bigint,
+  `PART_KEY_VAL` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_PARTITION_KEY_VALS` PRIMARY KEY (`PART_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PART_KEY_VAL\",
+  \"INTEGER_IDX\"
+FROM
+  \"PARTITION_KEY_VALS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PARTITION_PARAMS` (
+  `PART_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_PARTITION_PARAMS` PRIMARY KEY (`PART_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"PARTITION_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
+  `PART_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_COL_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_COL_PRIV\"
+FROM
+  \"PART_COL_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
+  `PART_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PART_ID` bigint,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `PART_PRIV` string,
+  CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"PART_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PART_ID\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"PART_PRIV\"
+FROM
+  \"PART_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `ROLES` (
+  `ROLE_ID` bigint,
+  `CREATE_TIME` int,
+  `OWNER_NAME` string,
+  `ROLE_NAME` string,
+  CONSTRAINT `SYS_PK_ROLES` PRIMARY KEY (`ROLE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_ID\",
+  \"CREATE_TIME\",
+  \"OWNER_NAME\",
+  \"ROLE_NAME\"
+FROM
+  \"ROLES\""
+);
+
+CREATE TABLE IF NOT EXISTS `ROLE_MAP` (
+  `ROLE_GRANT_ID` bigint,
+  `ADD_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `ROLE_ID` bigint,
+  CONSTRAINT `SYS_PK_ROLE_MAP` PRIMARY KEY (`ROLE_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"ROLE_GRANT_ID\",
+  \"ADD_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"ROLE_ID\"
+FROM
+  \"ROLE_MAP\""
+);
+
+CREATE TABLE IF NOT EXISTS `SDS` (
+  `SD_ID` bigint,
+  `CD_ID` bigint,
+  `INPUT_FORMAT` string,
+  `IS_COMPRESSED` boolean,
+  `IS_STOREDASSUBDIRECTORIES` boolean,
+  `LOCATION` string,
+  `NUM_BUCKETS` int,
+  `OUTPUT_FORMAT` string,
+  `SERDE_ID` bigint,
+  CONSTRAINT `SYS_PK_SDS` PRIMARY KEY (`SD_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"CD_ID\",
+  \"INPUT_FORMAT\",
+  \"IS_COMPRESSED\",
+  \"IS_STOREDASSUBDIRECTORIES\",
+  \"LOCATION\",
+  \"NUM_BUCKETS\",
+  \"OUTPUT_FORMAT\",
+  \"SERDE_ID\"
+FROM
+  \"SDS\""
+);
+
+CREATE TABLE IF NOT EXISTS `SD_PARAMS` (
+  `SD_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SD_PARAMS` PRIMARY KEY (`SD_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SD_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
+  `SEQUENCE_NAME` string,
+  `NEXT_VAL` bigint,
+  CONSTRAINT `SYS_PK_SEQUENCE_TABLE` PRIMARY KEY (`SEQUENCE_NAME`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SEQUENCE_NAME\",
+  \"NEXT_VAL\"
+FROM
+  \"SEQUENCE_TABLE\""
+);
+
+CREATE TABLE IF NOT EXISTS `SERDES` (
+  `SERDE_ID` bigint,
+  `NAME` string,
+  `SLIB` string,
+  CONSTRAINT `SYS_PK_SERDES` PRIMARY KEY (`SERDE_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"NAME\",
+  \"SLIB\"
+FROM
+  \"SERDES\""
+);
+
+CREATE TABLE IF NOT EXISTS `SERDE_PARAMS` (
+  `SERDE_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_SERDE_PARAMS` PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SERDE_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"SERDE_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
+  `SD_ID` bigint,
+  `SKEWED_COL_NAME` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_COL_NAMES` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"SKEWED_COL_NAME\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_COL_NAMES\""
+);
+
+CREATE TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
+  `SD_ID` bigint,
+  `STRING_LIST_ID_KID` bigint,
+  `LOCATION` string,
+  CONSTRAINT `SYS_PK_COL_VALUE_LOC_MAP` PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"STRING_LIST_ID_KID\",
+  \"LOCATION\"
+FROM
+  \"SKEWED_COL_VALUE_LOC_MAP\""
+);
+
+CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
+  `STRING_LIST_ID` bigint,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST` PRIMARY KEY (`STRING_LIST_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\"
+FROM
+  \"SKEWED_STRING_LIST\""
+);
+
+CREATE TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
+  `STRING_LIST_ID` bigint,
+  `STRING_LIST_VALUE` string,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST_VALUES` PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"STRING_LIST_ID\",
+  \"STRING_LIST_VALUE\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_STRING_LIST_VALUES\""
+);
+
+CREATE TABLE IF NOT EXISTS `SKEWED_VALUES` (
+  `SD_ID_OID` bigint,
+  `STRING_LIST_ID_EID` bigint,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SKEWED_VALUES` PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID_OID\",
+  \"STRING_LIST_ID_EID\",
+  \"INTEGER_IDX\"
+FROM
+  \"SKEWED_VALUES\""
+);
+
+CREATE TABLE IF NOT EXISTS `SORT_COLS` (
+  `SD_ID` bigint,
+  `COLUMN_NAME` string,
+  `ORDER` int,
+  `INTEGER_IDX` int,
+  CONSTRAINT `SYS_PK_SORT_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"SD_ID\",
+  \"COLUMN_NAME\",
+  \"ORDER\",
+  \"INTEGER_IDX\"
+FROM
+  \"SORT_COLS\""
+);
+
+CREATE TABLE IF NOT EXISTS `TABLE_PARAMS` (
+  `TBL_ID` bigint,
+  `PARAM_KEY` string,
+  `PARAM_VALUE` string,
+  CONSTRAINT `SYS_PK_TABLE_PARAMS` PRIMARY KEY (`TBL_ID`,`PARAM_KEY`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"PARAM_KEY\",
+  \"PARAM_VALUE\"
+FROM
+  \"TABLE_PARAMS\""
+);
+
+CREATE TABLE IF NOT EXISTS `TBLS` (
+  `TBL_ID` bigint,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `LAST_ACCESS_TIME` int,
+  `OWNER` string,
+  `RETENTION` int,
+  `SD_ID` bigint,
+  `TBL_NAME` string,
+  `TBL_TYPE` string,
+  `VIEW_EXPANDED_TEXT` string,
+  `VIEW_ORIGINAL_TEXT` string,
+  `IS_REWRITE_ENABLED` boolean,
+  CONSTRAINT `SYS_PK_TBLS` PRIMARY KEY (`TBL_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_ID\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"LAST_ACCESS_TIME\",
+  \"OWNER\",
+  \"RETENTION\",
+  \"SD_ID\",
+  \"TBL_NAME\",
+  \"TBL_TYPE\",
+  \"VIEW_EXPANDED_TEXT\",
+  \"VIEW_ORIGINAL_TEXT\",
+  \"IS_REWRITE_ENABLED\"
+FROM TBLS"
+);
+
+CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
+  `TBL_COLUMN_GRANT_ID` bigint,
+  `COLUMN_NAME` string,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_COL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_COLUMN_GRANT_ID\",
+  \"COLUMN_NAME\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_COL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_COL_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
+  `TBL_GRANT_ID` bigint,
+  `CREATE_TIME` int,
+  `GRANT_OPTION` int,
+  `GRANTOR` string,
+  `GRANTOR_TYPE` string,
+  `PRINCIPAL_NAME` string,
+  `PRINCIPAL_TYPE` string,
+  `TBL_PRIV` string,
+  `TBL_ID` bigint,
+  CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"TBL_GRANT_ID\",
+  \"CREATE_TIME\",
+  \"GRANT_OPTION\",
+  \"GRANTOR\",
+  \"GRANTOR_TYPE\",
+  \"PRINCIPAL_NAME\",
+  \"PRINCIPAL_TYPE\",
+  \"TBL_PRIV\",
+  \"TBL_ID\"
+FROM
+  \"TBL_PRIVS\""
+);
+
+CREATE TABLE IF NOT EXISTS `TAB_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `TBL_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_TAB_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"TBL_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"TAB_COL_STATS\""
+);
+
+CREATE TABLE IF NOT EXISTS `PART_COL_STATS` (
+ `CS_ID` bigint,
+ `DB_NAME` string,
+ `TABLE_NAME` string,
+ `PARTITION_NAME` string,
+ `COLUMN_NAME` string,
+ `COLUMN_TYPE` string,
+ `PART_ID` bigint,
+ `LONG_LOW_VALUE` bigint,
+ `LONG_HIGH_VALUE` bigint,
+ `DOUBLE_HIGH_VALUE` double,
+ `DOUBLE_LOW_VALUE` double,
+ `BIG_DECIMAL_LOW_VALUE` string,
+ `BIG_DECIMAL_HIGH_VALUE` string,
+ `NUM_NULLS` bigint,
+ `NUM_DISTINCTS` bigint,
+ `AVG_COL_LEN` double,
+ `MAX_COL_LEN` bigint,
+ `NUM_TRUES` bigint,
+ `NUM_FALSES` bigint,
+ `LAST_ANALYZED` bigint,
+  CONSTRAINT `SYS_PK_PART_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+ \"CS_ID\",
+ \"DB_NAME\",
+ \"TABLE_NAME\",
+ \"PARTITION_NAME\",
+ \"COLUMN_NAME\",
+ \"COLUMN_TYPE\",
+ \"PART_ID\",
+ \"LONG_LOW_VALUE\",
+ \"LONG_HIGH_VALUE\",
+ \"DOUBLE_HIGH_VALUE\",
+ \"DOUBLE_LOW_VALUE\",
+ \"BIG_DECIMAL_LOW_VALUE\",
+ \"BIG_DECIMAL_HIGH_VALUE\",
+ \"NUM_NULLS\",
+ \"NUM_DISTINCTS\",
+ \"AVG_COL_LEN\",
+ \"MAX_COL_LEN\",
+ \"NUM_TRUES\",
+ \"NUM_FALSES\",
+ \"LAST_ANALYZED\"
+FROM
+  \"PART_COL_STATS\""
+);
+
+CREATE TABLE IF NOT EXISTS `VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+);
+
+INSERT INTO `VERSION` VALUES (1, '3.0.0', 'Hive release version 3.0.0');
+
+CREATE TABLE IF NOT EXISTS `DB_VERSION` (
+  `VER_ID` BIGINT,
+  `SCHEMA_VERSION` string,
+  `VERSION_COMMENT` string,
+  CONSTRAINT `SYS_PK_DB_VERSION` PRIMARY KEY (`VER_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"VER_ID\",
+  \"SCHEMA_VERSION\",
+  \"VERSION_COMMENT\"
+FROM
+  \"VERSION\""
+);
+
+CREATE TABLE IF NOT EXISTS `FUNCS` (
+  `FUNC_ID` bigint,
+  `CLASS_NAME` string,
+  `CREATE_TIME` int,
+  `DB_ID` bigint,
+  `FUNC_NAME` string,
+  `FUNC_TYPE` int,
+  `OWNER_NAME` string,
+  `OWNER_TYPE` string,
+  CONSTRAINT `SYS_PK_FUNCS` PRIMARY KEY (`FUNC_ID`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"FUNC_ID\",
+  \"CLASS_NAME\",
+  \"CREATE_TIME\",
+  \"DB_ID\",
+  \"FUNC_NAME\",
+  \"FUNC_TYPE\",
+  \"OWNER_NAME\",
+  \"OWNER_TYPE\"
+FROM
+  \"FUNCS\""
+);
+
+-- CREATE TABLE IF NOT EXISTS `FUNC_RU` (
+--   `FUNC_ID` bigint,
+--   `RESOURCE_TYPE` int,
+--   `RESOURCE_URI` string,
+--   `INTEGER_IDX` int,
+--   CONSTRAINT `SYS_PK_FUNCS_RU` PRIMARY KEY (`FUNC_ID`, `INTEGER_IDX`) DISABLE NOVALIDATE
+-- )
+-- STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+-- TBLPROPERTIES (
+-- "hive.sql.database.type" = "METASTORE",
+-- "hive.sql.query" = "SELECT * FROM FUNCS_RU"
+-- );
+
+CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
+(
+  `CHILD_CD_ID` bigint,
+  `CHILD_INTEGER_IDX` int,
+  `CHILD_TBL_ID` bigint,
+  `PARENT_CD_ID` bigint,
+  `PARENT_INTEGER_IDX` int,
+  `PARENT_TBL_ID` bigint,
+  `POSITION` bigint,
+  `CONSTRAINT_NAME` string,
+  `CONSTRAINT_TYPE` string,
+  `UPDATE_RULE` string,
+  `DELETE_RULE` string,
+  `ENABLE_VALIDATE_RELY` int,
+  CONSTRAINT `SYS_PK_KEY_CONSTRAINTS` PRIMARY KEY (`CONSTRAINT_NAME`, `POSITION`) DISABLE NOVALIDATE
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  \"CHILD_CD_ID\",
+  \"CHILD_INTEGER_IDX\",
+  \"CHILD_TBL_ID\",
+  \"PARENT_CD_ID\",
+  \"PARENT_INTEGER_IDX\",
+  \"PARENT_TBL_ID\",
+  \"POSITION\",
+  \"CONSTRAINT_NAME\",
+  \"CONSTRAINT_TYPE\",
+  \"UPDATE_RULE\",
+  \"DELETE_RULE\",
+  \"ENABLE_VALIDATE_RELY\"
+FROM
+  \"KEY_CONSTRAINTS\""
+);
+
+CREATE DATABASE INFORMATION_SCHEMA;
+
+USE INFORMATION_SCHEMA;
+
+CREATE VIEW IF NOT EXISTS `SCHEMATA`
+(
+  `CATALOG_NAME`,
+  `SCHEMA_NAME`,
+  `SCHEMA_OWNER`,
+  `DEFAULT_CHARACTER_SET_CATALOG`,
+  `DEFAULT_CHARACTER_SET_SCHEMA`,
+  `DEFAULT_CHARACTER_SET_NAME`,
+  `SQL_PATH`
+) AS
+SELECT
+  'default',
+  `NAME`,
+  `OWNER_NAME`,
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  `DB_LOCATION_URI`
+FROM
+  sys.DBS;
+
+CREATE VIEW IF NOT EXISTS `TABLES`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `TABLE_TYPE`,
+  `SELF_REFERENCING_COLUMN_NAME`,
+  `REFERENCE_GENERATION`,
+  `USER_DEFINED_TYPE_CATALOG`,
+  `USER_DEFINED_TYPE_SCHEMA`,
+  `USER_DEFINED_TYPE_NAME`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TYPED`,
+  `COMMIT_ACTION`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'VIEW', 'BASE_TABLE'),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  cast(null as string),
+  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'NO', 'YES'),
+  'NO',
+  cast(null as string)
+FROM
+  `sys`.`TBLS` T, `sys`.`DBS` D
+WHERE
+  D.`DB_ID` = T.`DB_ID`;
+
+CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`,
+  `WITH_HIERARCHY`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  P.`TBL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
+  'NO'
+FROM
+  sys.`TBL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  P.TBL_ID = T.TBL_ID
+  AND T.DB_ID = D.DB_ID;
+
+CREATE VIEW IF NOT EXISTS `COLUMNS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `ORDINAL_POSITION`,
+  `COLUMN_DEFAULT`,
+  `IS_NULLABLE`,
+  `DATA_TYPE`,
+  `CHARACTER_MAXIMUM_LENGTH`,
+  `CHARACTER_OCTET_LENGTH`,
+  `NUMERIC_PRECISION`,
+  `NUMERIC_PRECISION_RADIX`,
+  `NUMERIC_SCALE`,
+  `DATETIME_PRECISION`,
+  `INTERVAL_TYPE`,
+  `INTERVAL_PRECISION`,
+  `CHARACTER_SET_CATALOG`,
+  `CHARACTER_SET_SCHEMA`,
+  `CHARACTER_SET_NAME`,
+  `COLLATION_CATALOG`,
+  `COLLATION_SCHEMA`,
+  `COLLATION_NAME`,
+  `UDT_CATALOG`,
+  `UDT_SCHEMA`,
+  `UDT_NAME`,
+  `SCOPE_CATALOG`,
+  `SCOPE_SCHEMA`,
+  `SCOPE_NAME`,
+  `MAXIMUM_CARDINALITY`,
+  `DTD_IDENTIFIER`,
+  `IS_SELF_REFERENCING`,
+  `IS_IDENTITY`,
+  `IDENTITY_GENERATION`,
+  `IDENTITY_START`,
+  `IDENTITY_INCREMENT`,
+  `IDENTITY_MAXIMUM`,
+  `IDENTITY_MINIMUM`,
+  `IDENTITY_CYCLE`,
+  `IS_GENERATED`,
+  `GENERATION_EXPRESSION`,
+  `IS_SYSTEM_TIME_PERIOD_START`,
+  `IS_SYSTEM_TIME_PERIOD_END`,
+  `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
+  `IS_UPDATABLE`,
+  `DECLARED_DATA_TYPE`,
+  `DECLARED_NUMERIC_PRECISION`,
+  `DECLARED_NUMERIC_SCALE`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  C.COLUMN_NAME,
+  C.INTEGER_IDX,
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as TYPE_NAME,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
+       WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
+       ELSE null END,
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  C.CD_ID,
+  'NO',
+  'NO',
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  cast (null as string),
+  'NEVER',
+  cast (null as string),
+  'NO',
+  'NO',
+  cast (null as string),
+  'YES',
+  C.TYPE_NAME as DECLARED_DATA_TYPE,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+       ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+       ELSE null END
+FROM
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S,
+  sys.`TBLS` T,
+  sys.`DBS` D
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND C.`CD_ID` = S.`CD_ID`;
+
+CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
+(
+  `GRANTOR`,
+  `GRANTEE`,
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `COLUMN_NAME`,
+  `PRIVILEGE_TYPE`,
+  `IS_GRANTABLE`
+) AS
+SELECT
+  `GRANTOR`,
+  `PRINCIPAL_NAME`,
+  'default',
+  D.`NAME`,
+  T.`TBL_NAME`,
+  C.`COLUMN_NAME`,
+  P.`TBL_COL_PRIV`,
+  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
+FROM
+  sys.`TBL_COL_PRIVS` P,
+  sys.`TBLS` T,
+  sys.`DBS` D,
+  sys.`COLUMNS_V2` C,
+  sys.`SDS` S
+WHERE
+  S.`SD_ID` = T.`SD_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND P.`TBL_ID` = T.`TBL_ID`
+  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
+  AND C.`CD_ID` = S.`CD_ID`;
+
+CREATE VIEW IF NOT EXISTS `VIEWS`
+(
+  `TABLE_CATALOG`,
+  `TABLE_SCHEMA`,
+  `TABLE_NAME`,
+  `VIEW_DEFINITION`,
+  `CHECK_OPTION`,
+  `IS_UPDATABLE`,
+  `IS_INSERTABLE_INTO`,
+  `IS_TRIGGER_UPDATABLE`,
+  `IS_TRIGGER_DELETABLE`,
+  `IS_TRIGGER_INSERTABLE_INTO`
+) AS
+SELECT
+  'default',
+  D.NAME,
+  T.TBL_NAME,
+  T.VIEW_ORIGINAL_TEXT,
+  CAST(NULL as string),
+  false,
+  false,
+  false,
+  false,
+  false
+FROM
+  `sys`.`DBS` D,
+  `sys`.`TBLS` T
+WHERE
+   D.`DB_ID` = T.`DB_ID` AND
+   length(T.VIEW_ORIGINAL_TEXT) > 0;
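
Once this script has been run, the SYS tables and INFORMATION_SCHEMA views can be queried like any other Hive tables. A rough sketch of reading the TABLES view over the HiveServer2 JDBC driver; the endpoint, credentials, and row limit are placeholders:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class InformationSchemaQuerySketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical HiveServer2 endpoint; adjust host, port, and credentials for a real cluster.
        String url = "jdbc:hive2://localhost:10000/information_schema";
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                 "SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE FROM TABLES LIMIT 10")) {
          while (rs.next()) {
            System.out.println(rs.getString(1) + "." + rs.getString(2) + " (" + rs.getString(3) + ")");
          }
        }
      }
    }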

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 4d727ba..f3c571a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -357,8 +357,16 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
       taskId = Utilities.getTaskId(hconf);
       initializeSpecPath();
       fs = specPath.getFileSystem(hconf);
+
+      if (hconf instanceof JobConf) {
+        jc = (JobConf) hconf;
+      } else {
+        // test code path
+        jc = new JobConf(hconf);
+      }
+
       try {
-        createHiveOutputFormat(hconf);
+        createHiveOutputFormat(jc);
       } catch (HiveException ex) {
         logOutputFormatError(hconf, ex);
         throw ex;
@@ -379,12 +387,6 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
       // half of the script.timeout but less than script.timeout, we will still
       // be able to report progress.
       timeOut = hconf.getInt("mapred.healthChecker.script.timeout", 600000) / 2;
-      if (hconf instanceof JobConf) {
-        jc = (JobConf) hconf;
-      } else {
-        // test code path
-        jc = new JobConf(hconf);
-      }
 
       if (multiFileSpray) {
         partitionEval = new ExprNodeEvaluator[conf.getPartitionCols().size()];
@@ -1158,12 +1160,12 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
     }
   }
 
-  private void createHiveOutputFormat(Configuration hconf) throws HiveException {
+  private void createHiveOutputFormat(JobConf job) throws HiveException {
     if (hiveOutputFormat == null) {
-      Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), hconf);
+      Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
     }
     try {
-      hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, getConf().getTableInfo());
+      hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(job, getConf().getTableInfo());
     } catch (Throwable t) {
       throw (t instanceof HiveException) ? (HiveException)t : new HiveException(t);
     }
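
The hunks above move the JobConf setup ahead of createHiveOutputFormat: the output-format path now needs a real JobConf rather than a bare Configuration, because copyTableJobPropertiesToConf (changed in the next file) writes table secrets into the job's Credentials, and only JobConf exposes getCredentials(). A minimal sketch of the cast-or-wrap step, using a hypothetical helper name that is not part of Hive:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobConf;

    // Hypothetical helper illustrating the pattern used in FileSinkOperator above.
    public final class JobConfHelper {
      private JobConfHelper() {}

      // Reuse the engine-provided JobConf when one is handed in; otherwise wrap
      // the plain Configuration (the "test code path" noted in the diff).
      public static JobConf asJobConf(Configuration hconf) {
        return (hconf instanceof JobConf) ? (JobConf) hconf : new JobConf(hconf);
      }
    }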

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 9036d9e..ebf1344 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -145,6 +145,8 @@ import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hive.common.util.ACLConfigurationParser;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -2027,7 +2029,7 @@ public final class Utilities {
    * @param job
    *          configuration which receives configured properties
    */
-  public static void copyTableJobPropertiesToConf(TableDesc tbl, Configuration job) {
+  public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
     Properties tblProperties = tbl.getProperties();
     for(String name: tblProperties.stringPropertyNames()) {
       if (job.get(name) == null) {
@@ -2038,11 +2040,23 @@ public final class Utilities {
       }
     }
     Map<String, String> jobProperties = tbl.getJobProperties();
-    if (jobProperties == null) {
-      return;
+    if (jobProperties != null) {
+      for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
+        job.set(entry.getKey(), entry.getValue());
+      }
     }
-    for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
-      job.set(entry.getKey(), entry.getValue());
+
+    try {
+      Map<String, String> jobSecrets = tbl.getJobSecrets();
+      if (jobSecrets != null) {
+        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
+          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+          UserGroupInformation.getCurrentUser().getCredentials()
+            .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+        }
+      }
+    } catch (IOException e) {
+      throw new HiveException(e);
     }
   }
 
@@ -2055,7 +2069,7 @@ public final class Utilities {
    * @param tbl
    * @param job
    */
-  public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) {
+  public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
     Properties tblProperties = tbl.getProperties();
     for(String name: tblProperties.stringPropertyNames()) {
       String val = (String) tblProperties.get(name);
@@ -2064,11 +2078,23 @@ public final class Utilities {
       }
     }
     Map<String, String> jobProperties = tbl.getJobProperties();
-    if (jobProperties == null) {
-      return;
+    if (jobProperties != null) {
+      for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
+        job.set(entry.getKey(), entry.getValue());
+      }
     }
-    for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
-      job.set(entry.getKey(), entry.getValue());
+
+    try {
+      Map<String, String> jobSecrets = tbl.getJobSecrets();
+      if (jobSecrets != null) {
+        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
+          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+          UserGroupInformation.getCurrentUser().getCredentials()
+            .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+        }
+      }
+    } catch (IOException e) {
+      throw new HiveException(e);
     }
   }
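
Both copyTableJobPropertiesToConf and copyTablePropertiesToConf now route entries from TableDesc.getJobSecrets() into Credentials instead of the Configuration itself, so secrets travel with the job's security tokens rather than being serialized into plain-text job configuration. A short sketch of the same flow, assuming the caller already has the secrets map; the class and method names below are illustrative, not Hive APIs, and the charset choice is an assumption (the patch uses the platform default):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.security.UserGroupInformation;

    // Illustrative only: mirrors the jobSecrets loops added in the patch above.
    public final class JobSecretsSketch {
      private JobSecretsSketch() {}

      // Stash each secret in both the job's and the current user's Credentials.
      public static void addSecrets(JobConf job, Map<String, String> jobSecrets)
          throws IOException {
        for (Map.Entry<String, String> e : jobSecrets.entrySet()) {
          byte[] secret = e.getValue().getBytes(StandardCharsets.UTF_8);
          job.getCredentials().addSecretKey(new Text(e.getKey()), secret);
          UserGroupInformation.getCurrentUser().getCredentials()
              .addSecretKey(new Text(e.getKey()), secret);
        }
      }

      // Downstream code can read a secret back by its alias.
      public static byte[] readSecret(JobConf job, String alias) {
        return job.getCredentials().getSecretKey(new Text(alias));
      }
    }

With the credentials attached to the current UGI, the DagUtils change below only needs to copy them onto the Tez DAG via dag.setCredentials(...) for them to reach the running tasks.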
 

http://git-wip-us.apache.org/repos/asf/hive/blob/77f44b66/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index 6497495..b0457be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -1201,7 +1201,8 @@ public class DagUtils {
   /**
    * Set up credentials for the base work on secure clusters
    */
-  public void addCredentials(BaseWork work, DAG dag) {
+  public void addCredentials(BaseWork work, DAG dag) throws IOException {
+    dag.setCredentials(UserGroupInformation.getCurrentUser().getCredentials());
     if (work instanceof MapWork) {
       addCredentials((MapWork) work, dag);
     } else if (work instanceof ReduceWork) {


[41/50] [abbrv] hive git commit: HIVE-15726: Reenable indentation checks to checkstyle (Peter Vary via Zoltan Haindrich)

Posted by we...@apache.org.
HIVE-15726: Reenable indentation checks to checkstyle (Peter Vary via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1b8ba022
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1b8ba022
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1b8ba022

Branch: refs/heads/hive-14535
Commit: 1b8ba022c26ef929f35dc12c5c70e1683fa2e373
Parents: 0ce98b3
Author: Peter Vary <pv...@cloudera.com>
Authored: Tue May 16 08:13:28 2017 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Tue May 16 08:13:28 2017 +0200

----------------------------------------------------------------------
 checkstyle/checkstyle.xml | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/1b8ba022/checkstyle/checkstyle.xml
----------------------------------------------------------------------
diff --git a/checkstyle/checkstyle.xml b/checkstyle/checkstyle.xml
index bd6b2f8..82b0a28 100644
--- a/checkstyle/checkstyle.xml
+++ b/checkstyle/checkstyle.xml
@@ -208,17 +208,11 @@
     <!-- Miscellaneous other checks.                   -->
     <!-- See http://checkstyle.sf.net/config_misc.html -->
     <module name="ArrayTypeStyle"/>
-
-    <!-- We like to indent "throws" clauses by 4 spaces -->
-    <!-- which is a nuance that Checkstyle does not     -->
-    <!-- support. Disabling Indentation check until we  -->
-    <!-- can figure out something else.                 -->
-    <!--
     <module name="Indentation">
       <property name="basicOffset" value="2" />
       <property name="caseIndent" value="0" />
+      <property name="throwsIndent" value="4" />
     </module>
-    -->
     <module name="TodoComment"/>
     <module name="UpperEll"/>
     

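For reference, the settings above (basicOffset=2, caseIndent=0, throwsIndent=4) accept code shaped like the following hypothetical snippet: class and method bodies step in by two spaces, case labels sit at the same column as the switch keyword, and a wrapped throws clause is indented four spaces from the method declaration.

    // Illustrative snippet only; shows the indentation the re-enabled module expects.
    public class IndentationExample {
      public String describe(int code)
          throws IllegalArgumentException {   // throws continuation: +4 spaces
        switch (code) {
        case 0:                               // caseIndent=0: aligned with "switch"
          return "zero";
        default:
          return "other";
        }
      }
    }
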

[14/50] [abbrv] hive git commit: HIVE-16602: Implement shared scans with Tez (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by we...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query51.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query51.q.out b/ql/src/test/results/clientpositive/perf/query51.q.out
index 7da09ba..2468c77 100644
--- a/ql/src/test/results/clientpositive/perf/query51.q.out
+++ b/ql/src/test/results/clientpositive/perf/query51.q.out
@@ -87,13 +87,13 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 10 <- Reducer 9 (SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
 Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Reducer 10 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
 Reducer 6 <- Reducer 5 (SIMPLE_EDGE)
-Reducer 9 <- Map 11 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 10 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -120,40 +120,6 @@ Stage-0
                         PartitionCols:CASE WHEN (_col3 is not null) THEN (_col3) ELSE (_col0) END
                         Merge Join Operator [MERGEJOIN_67] (rows=348477374 width=88)
                           Conds:RS_40._col0, _col1=RS_41._col0, _col1(Outer),Output:["_col0","_col1","_col2","_col3","_col4","_col5"]
-                        <-Reducer 10 [SIMPLE_EDGE]
-                          SHUFFLE [RS_41]
-                            PartitionCols:_col0, _col1
-                            Select Operator [SEL_37] (rows=79201469 width=135)
-                              Output:["_col0","_col1","_col2"]
-                              PTF Operator [PTF_36] (rows=79201469 width=135)
-                                Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}]
-                                Group By Operator [GBY_32] (rows=79201469 width=135)
-                                  Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
-                                <-Reducer 9 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_31]
-                                    PartitionCols:_col0
-                                    Group By Operator [GBY_30] (rows=158402938 width=135)
-                                      Output:["_col0","_col1","_col2"],aggregations:["sum(_col2)"],keys:_col1, _col4
-                                      Merge Join Operator [MERGEJOIN_66] (rows=158402938 width=135)
-                                        Conds:RS_26._col0=RS_27._col0(Inner),Output:["_col1","_col2","_col4"]
-                                      <-Map 11 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_27]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_25] (rows=8116 width=1119)
-                                            Output:["_col0","_col1"]
-                                            Filter Operator [FIL_62] (rows=8116 width=1119)
-                                              predicate:(d_month_seq BETWEEN 1193 AND 1204 and d_date_sk is not null)
-                                              TableScan [TS_23] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
-                                      <-Map 8 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_26]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_22] (rows=144002668 width=135)
-                                            Output:["_col0","_col1","_col2"]
-                                            Filter Operator [FIL_61] (rows=144002668 width=135)
-                                              predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
-                                              TableScan [TS_20] (rows=144002668 width=135)
-                                                default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_sales_price"]
                         <-Reducer 3 [SIMPLE_EDGE]
                           SHUFFLE [RS_40]
                             PartitionCols:_col0, _col1
@@ -170,6 +136,15 @@ Stage-0
                                       Output:["_col0","_col1","_col2"],aggregations:["sum(_col2)"],keys:_col1, _col4
                                       Merge Join Operator [MERGEJOIN_65] (rows=633595212 width=88)
                                         Conds:RS_6._col0=RS_7._col0(Inner),Output:["_col1","_col2","_col4"]
+                                      <-Map 7 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_7]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_5] (rows=8116 width=1119)
+                                            Output:["_col0","_col1"]
+                                            Filter Operator [FIL_60] (rows=8116 width=1119)
+                                              predicate:(d_month_seq BETWEEN 1193 AND 1204 and d_date_sk is not null)
+                                              TableScan [TS_3] (rows=73049 width=1119)
+                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
                                       <-Map 1 [SIMPLE_EDGE]
                                         SHUFFLE [RS_6]
                                           PartitionCols:_col0
@@ -179,13 +154,37 @@ Stage-0
                                               predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
                                               TableScan [TS_0] (rows=575995635 width=88)
                                                 default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_sales_price"]
+                        <-Reducer 9 [SIMPLE_EDGE]
+                          SHUFFLE [RS_41]
+                            PartitionCols:_col0, _col1
+                            Select Operator [SEL_37] (rows=79201469 width=135)
+                              Output:["_col0","_col1","_col2"]
+                              PTF Operator [PTF_36] (rows=79201469 width=135)
+                                Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}]
+                                Group By Operator [GBY_32] (rows=79201469 width=135)
+                                  Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1
+                                <-Reducer 8 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_31]
+                                    PartitionCols:_col0
+                                    Group By Operator [GBY_30] (rows=158402938 width=135)
+                                      Output:["_col0","_col1","_col2"],aggregations:["sum(_col2)"],keys:_col1, _col4
+                                      Merge Join Operator [MERGEJOIN_66] (rows=158402938 width=135)
+                                        Conds:RS_26._col0=RS_27._col0(Inner),Output:["_col1","_col2","_col4"]
                                       <-Map 7 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_7]
+                                        SHUFFLE [RS_27]
                                           PartitionCols:_col0
-                                          Select Operator [SEL_5] (rows=8116 width=1119)
+                                          Select Operator [SEL_25] (rows=8116 width=1119)
                                             Output:["_col0","_col1"]
-                                            Filter Operator [FIL_60] (rows=8116 width=1119)
+                                            Filter Operator [FIL_62] (rows=8116 width=1119)
                                               predicate:(d_month_seq BETWEEN 1193 AND 1204 and d_date_sk is not null)
-                                              TableScan [TS_3] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date","d_month_seq"]
+                                               Please refer to the previous TableScan [TS_3]
+                                      <-Map 10 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_26]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_22] (rows=144002668 width=135)
+                                            Output:["_col0","_col1","_col2"]
+                                            Filter Operator [FIL_61] (rows=144002668 width=135)
+                                              predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
+                                              TableScan [TS_20] (rows=144002668 width=135)
+                                                default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_sales_price"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query56.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query56.q.out b/ql/src/test/results/clientpositive/perf/query56.q.out
index 4fa28c2..9e710e0 100644
--- a/ql/src/test/results/clientpositive/perf/query56.q.out
+++ b/ql/src/test/results/clientpositive/perf/query56.q.out
@@ -133,26 +133,26 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 11 <- Map 10 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
-Reducer 12 <- Map 14 (SIMPLE_EDGE), Reducer 11 (SIMPLE_EDGE)
-Reducer 16 <- Map 15 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
-Reducer 17 <- Reducer 16 (SIMPLE_EDGE), Reducer 23 (SIMPLE_EDGE)
-Reducer 18 <- Reducer 17 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 20 <- Map 19 (SIMPLE_EDGE)
-Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 24 (SIMPLE_EDGE)
-Reducer 23 <- Map 25 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
-Reducer 27 <- Map 26 (SIMPLE_EDGE), Reducer 31 (SIMPLE_EDGE)
-Reducer 28 <- Reducer 27 (SIMPLE_EDGE), Reducer 34 (SIMPLE_EDGE)
-Reducer 29 <- Reducer 28 (SIMPLE_EDGE), Union 5 (CONTAINS)
-Reducer 3 <- Reducer 12 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 31 <- Map 30 (SIMPLE_EDGE)
-Reducer 33 <- Map 32 (SIMPLE_EDGE), Map 35 (SIMPLE_EDGE)
-Reducer 34 <- Map 36 (SIMPLE_EDGE), Reducer 33 (SIMPLE_EDGE)
+Reducer 10 <- Reducer 9 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 11 <- Map 1 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 12 <- Reducer 11 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
+Reducer 13 <- Reducer 12 (SIMPLE_EDGE), Union 5 (CONTAINS)
+Reducer 15 <- Map 14 (SIMPLE_EDGE)
+Reducer 16 <- Map 14 (SIMPLE_EDGE)
+Reducer 17 <- Map 14 (SIMPLE_EDGE)
+Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 21 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
+Reducer 20 <- Map 26 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 22 <- Map 21 (SIMPLE_EDGE), Map 27 (SIMPLE_EDGE)
+Reducer 23 <- Map 26 (SIMPLE_EDGE), Reducer 22 (SIMPLE_EDGE)
+Reducer 24 <- Map 21 (SIMPLE_EDGE), Map 28 (SIMPLE_EDGE)
+Reducer 25 <- Map 26 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
+Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 20 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 3 (SIMPLE_EDGE), Union 5 (CONTAINS)
 Reducer 6 <- Union 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
-Reducer 9 <- Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
+Reducer 9 <- Reducer 23 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -169,50 +169,20 @@ Stage-0
               Group By Operator [GBY_117] (rows=335408073 width=108)
                 Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
               <-Union 5 [SIMPLE_EDGE]
-                <-Reducer 18 [CONTAINS]
+                <-Reducer 10 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_72] (rows=191657247 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 17 [SIMPLE_EDGE]
+                      <-Reducer 9 [SIMPLE_EDGE]
                         SHUFFLE [RS_71]
                           PartitionCols:_col0
                           Group By Operator [GBY_70] (rows=383314495 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_184] (rows=383314495 width=135)
                               Conds:RS_66._col0=RS_67._col4(Inner),Output:["_col1","_col8"]
-                            <-Reducer 16 [SIMPLE_EDGE]
-                              SHUFFLE [RS_66]
-                                PartitionCols:_col0
-                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
-                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 15 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_63]
-                                    PartitionCols:_col1
-                                    Select Operator [SEL_39] (rows=462000 width=1436)
-                                      Output:["_col0","_col1"]
-                                      Filter Operator [FIL_164] (rows=462000 width=1436)
-                                        predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_37] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 20 [SIMPLE_EDGE]
-                                  SHUFFLE [RS_64]
-                                    PartitionCols:_col0
-                                    Group By Operator [GBY_45] (rows=115500 width=1436)
-                                      Output:["_col0"],keys:KEY._col0
-                                    <-Map 19 [SIMPLE_EDGE]
-                                      SHUFFLE [RS_44]
-                                        PartitionCols:_col0
-                                        Group By Operator [GBY_43] (rows=231000 width=1436)
-                                          Output:["_col0"],keys:i_item_id
-                                          Select Operator [SEL_42] (rows=231000 width=1436)
-                                            Output:["i_item_id"]
-                                            Filter Operator [FIL_165] (rows=231000 width=1436)
-                                              predicate:((i_color) IN ('orchid', 'chiffon', 'lace') and i_item_id is not null)
-                                              TableScan [TS_40] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_color"]
                             <-Reducer 23 [SIMPLE_EDGE]
                               SHUFFLE [RS_67]
                                 PartitionCols:_col4
@@ -220,14 +190,14 @@ Stage-0
                                   Output:["_col4","_col5"]
                                   Merge Join Operator [MERGEJOIN_179] (rows=348467716 width=135)
                                     Conds:RS_59._col1=RS_60._col0(Inner),Output:["_col2","_col3"]
-                                  <-Map 25 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_60]
                                       PartitionCols:_col0
                                       Select Operator [SEL_55] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_168] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -8) and ca_address_sk is not null)
-                                          TableScan [TS_53] (rows=40000000 width=1014)
+                                          TableScan [TS_16] (rows=40000000 width=1014)
                                             default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
                                   <-Reducer 22 [SIMPLE_EDGE]
                                     SHUFFLE [RS_59]
@@ -235,6 +205,15 @@ Stage-0
                                       Merge Join Operator [MERGEJOIN_178] (rows=316788826 width=135)
                                         Conds:RS_56._col0=RS_57._col0(Inner),Output:["_col1","_col2","_col3"]
                                       <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_57]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_52] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_167] (rows=18262 width=1119)
+                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
+                                              TableScan [TS_13] (rows=73049 width=1119)
+                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
+                                      <-Map 27 [SIMPLE_EDGE]
                                         SHUFFLE [RS_56]
                                           PartitionCols:_col0
                                           Select Operator [SEL_49] (rows=287989836 width=135)
@@ -243,49 +222,69 @@ Stage-0
                                               predicate:(cs_sold_date_sk is not null and cs_bill_addr_sk is not null and cs_item_sk is not null)
                                               TableScan [TS_47] (rows=287989836 width=135)
                                                 default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_addr_sk","cs_item_sk","cs_ext_sales_price"]
-                                      <-Map 24 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_57]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_52] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_167] (rows=18262 width=1119)
-                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
-                                              TableScan [TS_50] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
-                <-Reducer 29 [CONTAINS]
+                            <-Reducer 8 [SIMPLE_EDGE]
+                              SHUFFLE [RS_66]
+                                PartitionCols:_col0
+                                Merge Join Operator [MERGEJOIN_177] (rows=508200 width=1436)
+                                  Conds:RS_63._col1=RS_64._col0(Inner),Output:["_col0","_col1"]
+                                <-Map 1 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_63]
+                                    PartitionCols:_col1
+                                    Select Operator [SEL_39] (rows=462000 width=1436)
+                                      Output:["_col0","_col1"]
+                                      Filter Operator [FIL_164] (rows=462000 width=1436)
+                                        predicate:(i_item_id is not null and i_item_sk is not null)
+                                        TableScan [TS_0] (rows=462000 width=1436)
+                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
+                                <-Reducer 16 [SIMPLE_EDGE]
+                                  SHUFFLE [RS_64]
+                                    PartitionCols:_col0
+                                    Group By Operator [GBY_45] (rows=115500 width=1436)
+                                      Output:["_col0"],keys:KEY._col0
+                                    <-Map 14 [SIMPLE_EDGE]
+                                      SHUFFLE [RS_44]
+                                        PartitionCols:_col0
+                                        Group By Operator [GBY_43] (rows=231000 width=1436)
+                                          Output:["_col0"],keys:i_item_id
+                                          Select Operator [SEL_42] (rows=231000 width=1436)
+                                            Output:["i_item_id"]
+                                            Filter Operator [FIL_165] (rows=231000 width=1436)
+                                              predicate:((i_color) IN ('orchid', 'chiffon', 'lace') and i_item_id is not null)
+                                              TableScan [TS_3] (rows=462000 width=1436)
+                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_color"]
+                <-Reducer 13 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
                     Group By Operator [GBY_115] (rows=670816147 width=108)
                       Output:["_col0","_col1"],aggregations:["sum(_col1)"],keys:_col0
                       Group By Operator [GBY_111] (rows=95833781 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 28 [SIMPLE_EDGE]
+                      <-Reducer 12 [SIMPLE_EDGE]
                         SHUFFLE [RS_110]
                           PartitionCols:_col0
                           Group By Operator [GBY_109] (rows=191667562 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_185] (rows=191667562 width=135)
                               Conds:RS_105._col0=RS_106._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 27 [SIMPLE_EDGE]
+                            <-Reducer 11 [SIMPLE_EDGE]
                               SHUFFLE [RS_105]
                                 PartitionCols:_col0
                                 Merge Join Operator [MERGEJOIN_180] (rows=508200 width=1436)
                                   Conds:RS_102._col1=RS_103._col0(Inner),Output:["_col0","_col1"]
-                                <-Map 26 [SIMPLE_EDGE]
+                                <-Map 1 [SIMPLE_EDGE]
                                   SHUFFLE [RS_102]
                                     PartitionCols:_col1
                                     Select Operator [SEL_78] (rows=462000 width=1436)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_169] (rows=462000 width=1436)
                                         predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_76] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 31 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 17 [SIMPLE_EDGE]
                                   SHUFFLE [RS_103]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_84] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 30 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_83]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_82] (rows=231000 width=1436)
@@ -294,30 +293,36 @@ Stage-0
                                             Output:["i_item_id"]
                                             Filter Operator [FIL_170] (rows=231000 width=1436)
                                               predicate:((i_color) IN ('orchid', 'chiffon', 'lace') and i_item_id is not null)
-                                              TableScan [TS_79] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_color"]
-                            <-Reducer 34 [SIMPLE_EDGE]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 25 [SIMPLE_EDGE]
                               SHUFFLE [RS_106]
                                 PartitionCols:_col3
                                 Select Operator [SEL_101] (rows=174243235 width=135)
                                   Output:["_col3","_col5"]
                                   Merge Join Operator [MERGEJOIN_182] (rows=174243235 width=135)
                                     Conds:RS_98._col2=RS_99._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 36 [SIMPLE_EDGE]
+                                  <-Map 26 [SIMPLE_EDGE]
                                     SHUFFLE [RS_99]
                                       PartitionCols:_col0
                                       Select Operator [SEL_94] (rows=20000000 width=1014)
                                         Output:["_col0"]
                                         Filter Operator [FIL_173] (rows=20000000 width=1014)
                                           predicate:((ca_gmt_offset = -8) and ca_address_sk is not null)
-                                          TableScan [TS_92] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 33 [SIMPLE_EDGE]
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 24 [SIMPLE_EDGE]
                                     SHUFFLE [RS_98]
                                       PartitionCols:_col2
                                       Merge Join Operator [MERGEJOIN_181] (rows=158402938 width=135)
                                         Conds:RS_95._col0=RS_96._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 32 [SIMPLE_EDGE]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_96]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_91] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_172] (rows=18262 width=1119)
+                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 28 [SIMPLE_EDGE]
                                         SHUFFLE [RS_95]
                                           PartitionCols:_col0
                                           Select Operator [SEL_88] (rows=144002668 width=135)
@@ -326,15 +331,6 @@ Stage-0
                                               predicate:(ws_sold_date_sk is not null and ws_bill_addr_sk is not null and ws_item_sk is not null)
                                               TableScan [TS_86] (rows=144002668 width=135)
                                                 default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_addr_sk","ws_ext_sales_price"]
-                                      <-Map 35 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_96]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_91] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_172] (rows=18262 width=1119)
-                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
-                                              TableScan [TS_89] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                 <-Reducer 4 [CONTAINS]
                   Reduce Output Operator [RS_116]
                     PartitionCols:_col0
@@ -349,45 +345,6 @@ Stage-0
                             Output:["_col0","_col1"],aggregations:["sum(_col8)"],keys:_col1
                             Merge Join Operator [MERGEJOIN_183] (rows=766650239 width=88)
                               Conds:RS_29._col0=RS_30._col3(Inner),Output:["_col1","_col8"]
-                            <-Reducer 12 [SIMPLE_EDGE]
-                              SHUFFLE [RS_30]
-                                PartitionCols:_col3
-                                Select Operator [SEL_25] (rows=696954748 width=88)
-                                  Output:["_col3","_col5"]
-                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
-                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
-                                  <-Map 14 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_23]
-                                      PartitionCols:_col0
-                                      Select Operator [SEL_18] (rows=20000000 width=1014)
-                                        Output:["_col0"]
-                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
-                                          predicate:((ca_gmt_offset = -8) and ca_address_sk is not null)
-                                          TableScan [TS_16] (rows=40000000 width=1014)
-                                            default@customer_address,customer_address,Tbl:COMPLETE,Col:NONE,Output:["ca_address_sk","ca_gmt_offset"]
-                                  <-Reducer 11 [SIMPLE_EDGE]
-                                    SHUFFLE [RS_22]
-                                      PartitionCols:_col2
-                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
-                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
-                                      <-Map 10 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_19]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_12] (rows=575995635 width=88)
-                                            Output:["_col0","_col1","_col2","_col3"]
-                                            Filter Operator [FIL_161] (rows=575995635 width=88)
-                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
-                                              TableScan [TS_10] (rows=575995635 width=88)
-                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
-                                      <-Map 13 [SIMPLE_EDGE]
-                                        SHUFFLE [RS_20]
-                                          PartitionCols:_col0
-                                          Select Operator [SEL_15] (rows=18262 width=1119)
-                                            Output:["_col0"]
-                                            Filter Operator [FIL_162] (rows=18262 width=1119)
-                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
-                                              TableScan [TS_13] (rows=73049 width=1119)
-                                                default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
                             <-Reducer 2 [SIMPLE_EDGE]
                               SHUFFLE [RS_29]
                                 PartitionCols:_col0
@@ -400,14 +357,13 @@ Stage-0
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_159] (rows=462000 width=1436)
                                         predicate:(i_item_id is not null and i_item_sk is not null)
-                                        TableScan [TS_0] (rows=462000 width=1436)
-                                          default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                                <-Reducer 9 [SIMPLE_EDGE]
+                                         Please refer to the previous TableScan [TS_0]
+                                <-Reducer 15 [SIMPLE_EDGE]
                                   SHUFFLE [RS_27]
                                     PartitionCols:_col0
                                     Group By Operator [GBY_8] (rows=115500 width=1436)
                                       Output:["_col0"],keys:KEY._col0
-                                    <-Map 8 [SIMPLE_EDGE]
+                                    <-Map 14 [SIMPLE_EDGE]
                                       SHUFFLE [RS_7]
                                         PartitionCols:_col0
                                         Group By Operator [GBY_6] (rows=231000 width=1436)
@@ -416,6 +372,42 @@ Stage-0
                                             Output:["i_item_id"]
                                             Filter Operator [FIL_160] (rows=231000 width=1436)
                                               predicate:((i_color) IN ('orchid', 'chiffon', 'lace') and i_item_id is not null)
-                                              TableScan [TS_3] (rows=462000 width=1436)
-                                                default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_id","i_color"]
+                                               Please refer to the previous TableScan [TS_3]
+                            <-Reducer 20 [SIMPLE_EDGE]
+                              SHUFFLE [RS_30]
+                                PartitionCols:_col3
+                                Select Operator [SEL_25] (rows=696954748 width=88)
+                                  Output:["_col3","_col5"]
+                                  Merge Join Operator [MERGEJOIN_176] (rows=696954748 width=88)
+                                    Conds:RS_22._col2=RS_23._col0(Inner),Output:["_col1","_col3"]
+                                  <-Map 26 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_23]
+                                      PartitionCols:_col0
+                                      Select Operator [SEL_18] (rows=20000000 width=1014)
+                                        Output:["_col0"]
+                                        Filter Operator [FIL_163] (rows=20000000 width=1014)
+                                          predicate:((ca_gmt_offset = -8) and ca_address_sk is not null)
+                                           Please refer to the previous TableScan [TS_16]
+                                  <-Reducer 19 [SIMPLE_EDGE]
+                                    SHUFFLE [RS_22]
+                                      PartitionCols:_col2
+                                      Merge Join Operator [MERGEJOIN_175] (rows=633595212 width=88)
+                                        Conds:RS_19._col0=RS_20._col0(Inner),Output:["_col1","_col2","_col3"]
+                                      <-Map 21 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_20]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_15] (rows=18262 width=1119)
+                                            Output:["_col0"]
+                                            Filter Operator [FIL_162] (rows=18262 width=1119)
+                                              predicate:((d_year = 2000) and (d_moy = 1) and d_date_sk is not null)
+                                               Please refer to the previous TableScan [TS_13]
+                                      <-Map 18 [SIMPLE_EDGE]
+                                        SHUFFLE [RS_19]
+                                          PartitionCols:_col0
+                                          Select Operator [SEL_12] (rows=575995635 width=88)
+                                            Output:["_col0","_col1","_col2","_col3"]
+                                            Filter Operator [FIL_161] (rows=575995635 width=88)
+                                              predicate:(ss_sold_date_sk is not null and ss_addr_sk is not null and ss_item_sk is not null)
+                                              TableScan [TS_10] (rows=575995635 width=88)
+                                                default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query58.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query58.q.out b/ql/src/test/results/clientpositive/perf/query58.q.out
index d03a736..e73605a 100644
--- a/ql/src/test/results/clientpositive/perf/query58.q.out
+++ b/ql/src/test/results/clientpositive/perf/query58.q.out
@@ -95,23 +95,23 @@ POSTHOOK: type: QUERY
 Plan optimized by CBO.
 
 Vertex dependency in root stage
-Reducer 14 <- Map 13 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
-Reducer 15 <- Map 21 (SIMPLE_EDGE), Reducer 14 (SIMPLE_EDGE)
-Reducer 16 <- Map 22 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE)
-Reducer 17 <- Reducer 16 (SIMPLE_EDGE)
-Reducer 19 <- Map 18 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
-Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 24 <- Map 23 (SIMPLE_EDGE), Reducer 29 (SIMPLE_EDGE)
-Reducer 25 <- Map 31 (SIMPLE_EDGE), Reducer 24 (SIMPLE_EDGE)
-Reducer 26 <- Map 32 (SIMPLE_EDGE), Reducer 25 (SIMPLE_EDGE)
-Reducer 27 <- Reducer 26 (SIMPLE_EDGE)
-Reducer 29 <- Map 28 (SIMPLE_EDGE), Map 30 (SIMPLE_EDGE)
-Reducer 3 <- Map 11 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
-Reducer 4 <- Map 12 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
+Reducer 10 <- Map 22 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
+Reducer 11 <- Reducer 10 (SIMPLE_EDGE)
+Reducer 12 <- Map 1 (SIMPLE_EDGE), Reducer 19 (SIMPLE_EDGE)
+Reducer 13 <- Map 24 (SIMPLE_EDGE), Reducer 12 (SIMPLE_EDGE)
+Reducer 14 <- Map 22 (SIMPLE_EDGE), Reducer 13 (SIMPLE_EDGE)
+Reducer 15 <- Reducer 14 (SIMPLE_EDGE)
+Reducer 17 <- Map 16 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
+Reducer 18 <- Map 16 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
+Reducer 19 <- Map 16 (SIMPLE_EDGE), Map 20 (SIMPLE_EDGE)
+Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 17 (SIMPLE_EDGE)
+Reducer 3 <- Map 21 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+Reducer 4 <- Map 22 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 4 (SIMPLE_EDGE)
-Reducer 6 <- Reducer 17 (SIMPLE_EDGE), Reducer 27 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+Reducer 6 <- Reducer 11 (SIMPLE_EDGE), Reducer 15 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
 Reducer 7 <- Reducer 6 (SIMPLE_EDGE)
-Reducer 9 <- Map 10 (SIMPLE_EDGE), Map 8 (SIMPLE_EDGE)
+Reducer 8 <- Map 1 (SIMPLE_EDGE), Reducer 18 (SIMPLE_EDGE)
+Reducer 9 <- Map 23 (SIMPLE_EDGE), Reducer 8 (SIMPLE_EDGE)
 
 Stage-0
   Fetch Operator
@@ -131,12 +131,12 @@ Stage-0
                   predicate:(_col5 BETWEEN (0.9 * _col1) AND (1.1 * _col1) and _col5 BETWEEN (0.9 * _col3) AND (1.1 * _col3) and _col1 BETWEEN (0.9 * _col5) AND (1.1 * _col5) and _col3 BETWEEN (0.9 * _col5) AND (1.1 * _col5) and _col1 BETWEEN (0.9 * _col3) AND (1.1 * _col3) and _col3 BETWEEN (0.9 * _col1) AND (1.1 * _col1))
                   Merge Join Operator [MERGEJOIN_213] (rows=766650239 width=88)
                     Conds:RS_99._col0=RS_100._col0(Inner),RS_99._col0=RS_101._col0(Inner),Output:["_col0","_col1","_col3","_col5"]
-                  <-Reducer 17 [SIMPLE_EDGE]
+                  <-Reducer 11 [SIMPLE_EDGE]
                     SHUFFLE [RS_100]
                       PartitionCols:_col0
                       Group By Operator [GBY_64] (rows=348477374 width=88)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 16 [SIMPLE_EDGE]
+                      <-Reducer 10 [SIMPLE_EDGE]
                         SHUFFLE [RS_63]
                           PartitionCols:_col0
                           Group By Operator [GBY_62] (rows=696954748 width=88)
@@ -150,14 +150,14 @@ Stage-0
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_195] (rows=462000 width=1436)
                                     predicate:(i_item_sk is not null and i_item_id is not null)
-                                    TableScan [TS_49] (rows=462000 width=1436)
+                                    TableScan [TS_16] (rows=462000 width=1436)
                                       default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                            <-Reducer 15 [SIMPLE_EDGE]
+                            <-Reducer 9 [SIMPLE_EDGE]
                               SHUFFLE [RS_58]
                                 PartitionCols:_col4
                                 Merge Join Operator [MERGEJOIN_207] (rows=633595212 width=88)
                                   Conds:RS_55._col0=RS_56._col0(Inner),Output:["_col4","_col5"]
-                                <-Map 21 [SIMPLE_EDGE]
+                                <-Map 23 [SIMPLE_EDGE]
                                   SHUFFLE [RS_56]
                                     PartitionCols:_col0
                                     Select Operator [SEL_48] (rows=575995635 width=88)
@@ -166,33 +166,33 @@ Stage-0
                                         predicate:(ss_item_sk is not null and ss_sold_date_sk is not null)
                                         TableScan [TS_46] (rows=575995635 width=88)
                                           default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_ext_sales_price"]
-                                <-Reducer 14 [SIMPLE_EDGE]
+                                <-Reducer 8 [SIMPLE_EDGE]
                                   SHUFFLE [RS_55]
                                     PartitionCols:_col0
                                     Merge Join Operator [MERGEJOIN_206] (rows=88388 width=1119)
                                       Conds:RS_52._col1=RS_53._col0(Inner),Output:["_col0"]
-                                    <-Map 13 [SIMPLE_EDGE]
+                                    <-Map 1 [SIMPLE_EDGE]
                                       SHUFFLE [RS_52]
                                         PartitionCols:_col1
                                         Select Operator [SEL_35] (rows=73049 width=1119)
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_191] (rows=73049 width=1119)
                                             predicate:(d_date_sk is not null and d_date is not null)
-                                            TableScan [TS_33] (rows=73049 width=1119)
+                                            TableScan [TS_0] (rows=73049 width=1119)
                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                    <-Reducer 19 [SIMPLE_EDGE]
+                                    <-Reducer 18 [SIMPLE_EDGE]
                                       SHUFFLE [RS_53]
                                         PartitionCols:_col0
                                         Merge Join Operator [MERGEJOIN_205] (rows=80353 width=1119)
                                           Conds:RS_42._col1=RS_43._col1(Inner),Output:["_col0"]
-                                        <-Map 18 [SIMPLE_EDGE]
+                                        <-Map 16 [SIMPLE_EDGE]
                                           SHUFFLE [RS_42]
                                             PartitionCols:_col1
                                             Select Operator [SEL_38] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_192] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_36] (rows=73049 width=1119)
+                                                TableScan [TS_3] (rows=73049 width=1119)
                                                   default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
                                         <-Map 20 [SIMPLE_EDGE]
                                           SHUFFLE [RS_43]
@@ -201,35 +201,34 @@ Stage-0
                                               Output:["_col1"]
                                               Filter Operator [FIL_193] (rows=36524 width=1119)
                                                 predicate:((d_date = '1998-08-04') and d_week_seq is not null)
-                                                TableScan [TS_39] (rows=73049 width=1119)
+                                                TableScan [TS_6] (rows=73049 width=1119)
                                                   default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                  <-Reducer 27 [SIMPLE_EDGE]
+                  <-Reducer 15 [SIMPLE_EDGE]
                     SHUFFLE [RS_101]
                       PartitionCols:_col0
                       Group By Operator [GBY_97] (rows=87121617 width=135)
                         Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0
-                      <-Reducer 26 [SIMPLE_EDGE]
+                      <-Reducer 14 [SIMPLE_EDGE]
                         SHUFFLE [RS_96]
                           PartitionCols:_col0
                           Group By Operator [GBY_95] (rows=174243235 width=135)
                             Output:["_col0","_col1"],aggregations:["sum(_col5)"],keys:_col7
                             Merge Join Operator [MERGEJOIN_212] (rows=174243235 width=135)
                               Conds:RS_91._col4=RS_92._col0(Inner),Output:["_col5","_col7"]
-                            <-Map 32 [SIMPLE_EDGE]
+                            <-Map 22 [SIMPLE_EDGE]
                               SHUFFLE [RS_92]
                                 PartitionCols:_col0
                                 Select Operator [SEL_84] (rows=462000 width=1436)
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_200] (rows=462000 width=1436)
                                     predicate:(i_item_sk is not null and i_item_id is not null)
-                                    TableScan [TS_82] (rows=462000 width=1436)
-                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
-                            <-Reducer 25 [SIMPLE_EDGE]
+                                     Please refer to the previous TableScan [TS_16]
+                            <-Reducer 13 [SIMPLE_EDGE]
                               SHUFFLE [RS_91]
                                 PartitionCols:_col4
                                 Merge Join Operator [MERGEJOIN_211] (rows=158402938 width=135)
                                   Conds:RS_88._col0=RS_89._col0(Inner),Output:["_col4","_col5"]
-                                <-Map 31 [SIMPLE_EDGE]
+                                <-Map 24 [SIMPLE_EDGE]
                                   SHUFFLE [RS_89]
                                     PartitionCols:_col0
                                     Select Operator [SEL_81] (rows=144002668 width=135)
@@ -238,43 +237,40 @@ Stage-0
                                         predicate:(ws_item_sk is not null and ws_sold_date_sk is not null)
                                         TableScan [TS_79] (rows=144002668 width=135)
                                           default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_ext_sales_price"]
-                                <-Reducer 24 [SIMPLE_EDGE]
+                                <-Reducer 12 [SIMPLE_EDGE]
                                   SHUFFLE [RS_88]
                                     PartitionCols:_col0
                                     Merge Join Operator [MERGEJOIN_210] (rows=88388 width=1119)
                                       Conds:RS_85._col1=RS_86._col0(Inner),Output:["_col0"]
-                                    <-Map 23 [SIMPLE_EDGE]
+                                    <-Map 1 [SIMPLE_EDGE]
                                       SHUFFLE [RS_85]
                                         PartitionCols:_col1
                                         Select Operator [SEL_68] (rows=73049 width=1119)
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_196] (rows=73049 width=1119)
                                             predicate:(d_date_sk is not null and d_date is not null)
-                                            TableScan [TS_66] (rows=73049 width=1119)
-                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                    <-Reducer 29 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_0]
+                                    <-Reducer 19 [SIMPLE_EDGE]
                                       SHUFFLE [RS_86]
                                         PartitionCols:_col0
                                         Merge Join Operator [MERGEJOIN_209] (rows=80353 width=1119)
                                           Conds:RS_75._col1=RS_76._col1(Inner),Output:["_col0"]
-                                        <-Map 28 [SIMPLE_EDGE]
+                                        <-Map 16 [SIMPLE_EDGE]
                                           SHUFFLE [RS_75]
                                             PartitionCols:_col1
                                             Select Operator [SEL_71] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_197] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_69] (rows=73049 width=1119)
-                                                  default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                                        <-Map 30 [SIMPLE_EDGE]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 20 [SIMPLE_EDGE]
                                           SHUFFLE [RS_76]
                                             PartitionCols:_col1
                                             Select Operator [SEL_74] (rows=36524 width=1119)
                                               Output:["_col1"]
                                               Filter Operator [FIL_198] (rows=36524 width=1119)
                                                 predicate:((d_date = '1998-08-04') and d_week_seq is not null)
-                                                TableScan [TS_72] (rows=73049 width=1119)
-                                                  default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
+                                                 Please refer to the previous TableScan [TS_6]
                   <-Reducer 5 [SIMPLE_EDGE]
                     SHUFFLE [RS_99]
                       PartitionCols:_col0
@@ -287,21 +283,20 @@ Stage-0
                             Output:["_col0","_col1"],aggregations:["sum(_col5)"],keys:_col7
                             Merge Join Operator [MERGEJOIN_204] (rows=348467716 width=135)
                               Conds:RS_25._col4=RS_26._col0(Inner),Output:["_col5","_col7"]
-                            <-Map 12 [SIMPLE_EDGE]
+                            <-Map 22 [SIMPLE_EDGE]
                               SHUFFLE [RS_26]
                                 PartitionCols:_col0
                                 Select Operator [SEL_18] (rows=462000 width=1436)
                                   Output:["_col0","_col1"]
                                   Filter Operator [FIL_190] (rows=462000 width=1436)
                                     predicate:(i_item_sk is not null and i_item_id is not null)
-                                    TableScan [TS_16] (rows=462000 width=1436)
-                                      default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_item_id"]
+                                     Please refer to the previous TableScan [TS_16]
                             <-Reducer 3 [SIMPLE_EDGE]
                               SHUFFLE [RS_25]
                                 PartitionCols:_col4
                                 Merge Join Operator [MERGEJOIN_203] (rows=316788826 width=135)
                                   Conds:RS_22._col0=RS_23._col0(Inner),Output:["_col4","_col5"]
-                                <-Map 11 [SIMPLE_EDGE]
+                                <-Map 21 [SIMPLE_EDGE]
                                   SHUFFLE [RS_23]
                                     PartitionCols:_col0
                                     Select Operator [SEL_15] (rows=287989836 width=135)
@@ -322,29 +317,26 @@ Stage-0
                                           Output:["_col0","_col1"]
                                           Filter Operator [FIL_186] (rows=73049 width=1119)
                                             predicate:(d_date_sk is not null and d_date is not null)
-                                            TableScan [TS_0] (rows=73049 width=1119)
-                                              default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
-                                    <-Reducer 9 [SIMPLE_EDGE]
+                                             Please refer to the previous TableScan [TS_0]
+                                    <-Reducer 17 [SIMPLE_EDGE]
                                       SHUFFLE [RS_20]
                                         PartitionCols:_col0
                                         Merge Join Operator [MERGEJOIN_201] (rows=80353 width=1119)
                                           Conds:RS_9._col1=RS_10._col1(Inner),Output:["_col0"]
-                                        <-Map 10 [SIMPLE_EDGE]
-                                          SHUFFLE [RS_10]
-                                            PartitionCols:_col1
-                                            Select Operator [SEL_8] (rows=36524 width=1119)
-                                              Output:["_col1"]
-                                              Filter Operator [FIL_188] (rows=36524 width=1119)
-                                                predicate:((d_date = '1998-08-04') and d_week_seq is not null)
-                                                TableScan [TS_6] (rows=73049 width=1119)
-                                                  default@date_dim,d2,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
-                                        <-Map 8 [SIMPLE_EDGE]
+                                        <-Map 16 [SIMPLE_EDGE]
                                           SHUFFLE [RS_9]
                                             PartitionCols:_col1
                                             Select Operator [SEL_5] (rows=73049 width=1119)
                                               Output:["_col0","_col1"]
                                               Filter Operator [FIL_187] (rows=73049 width=1119)
                                                 predicate:(d_week_seq is not null and d_date is not null)
-                                                TableScan [TS_3] (rows=73049 width=1119)
-                                                  default@date_dim,d1,Tbl:COMPLETE,Col:NONE,Output:["d_date","d_week_seq"]
+                                                 Please refer to the previous TableScan [TS_3]
+                                        <-Map 20 [SIMPLE_EDGE]
+                                          SHUFFLE [RS_10]
+                                            PartitionCols:_col1
+                                            Select Operator [SEL_8] (rows=36524 width=1119)
+                                              Output:["_col1"]
+                                              Filter Operator [FIL_188] (rows=36524 width=1119)
+                                                predicate:((d_date = '1998-08-04') and d_week_seq is not null)
+                                                 Please refer to the previous TableScan [TS_6]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/59f65772/ql/src/test/results/clientpositive/perf/query6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query6.q.out b/ql/src/test/results/clientpositive/perf/query6.q.out
index 183c2ec..04bda70 100644
--- a/ql/src/test/results/clientpositive/perf/query6.q.out
+++ b/ql/src/test/results/clientpositive/perf/query6.q.out
@@ -52,11 +52,11 @@ Plan optimized by CBO.
 Vertex dependency in root stage
 Reducer 12 <- Map 11 (SIMPLE_EDGE), Map 13 (SIMPLE_EDGE)
 Reducer 15 <- Map 14 (SIMPLE_EDGE)
-Reducer 16 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 20 (CUSTOM_SIMPLE_EDGE)
-Reducer 17 <- Map 21 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
-Reducer 19 <- Map 18 (SIMPLE_EDGE)
+Reducer 16 <- Reducer 15 (CUSTOM_SIMPLE_EDGE), Reducer 19 (CUSTOM_SIMPLE_EDGE)
+Reducer 17 <- Map 20 (SIMPLE_EDGE), Reducer 16 (SIMPLE_EDGE)
+Reducer 18 <- Map 14 (SIMPLE_EDGE)
+Reducer 19 <- Reducer 18 (CUSTOM_SIMPLE_EDGE)
 Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 9 (SIMPLE_EDGE)
-Reducer 20 <- Reducer 19 (CUSTOM_SIMPLE_EDGE)
 Reducer 3 <- Map 10 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
 Reducer 4 <- Reducer 12 (SIMPLE_EDGE), Reducer 3 (SIMPLE_EDGE)
 Reducer 5 <- Reducer 17 (SIMPLE_EDGE), Reducer 4 (SIMPLE_EDGE)
@@ -100,7 +100,7 @@ Stage-0
                                   Output:["_col0"]
                                   Merge Join Operator [MERGEJOIN_112] (rows=80353 width=1119)
                                     Conds:RS_50._col0=RS_51._col1(Inner),Output:["_col2"]
-                                  <-Map 21 [SIMPLE_EDGE]
+                                  <-Map 20 [SIMPLE_EDGE]
                                     SHUFFLE [RS_51]
                                       PartitionCols:_col1
                                       Select Operator [SEL_46] (rows=73049 width=1119)
@@ -129,21 +129,21 @@ Stage-0
                                                     predicate:((d_year = 2000) and (d_moy = 2) and d_month_seq is not null)
                                                     TableScan [TS_23] (rows=73049 width=1119)
                                                       default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_month_seq","d_year","d_moy"]
-                                      <-Reducer 20 [CUSTOM_SIMPLE_EDGE]
+                                      <-Reducer 19 [CUSTOM_SIMPLE_EDGE]
                                         PARTITION_ONLY_SHUFFLE [RS_48]
                                           Select Operator [SEL_43] (rows=1 width=8)
                                             Filter Operator [FIL_42] (rows=1 width=8)
                                               predicate:(sq_count_check(_col0) <= 1)
                                               Group By Operator [GBY_40] (rows=1 width=8)
                                                 Output:["_col0"],aggregations:["count(VALUE._col0)"]
-                                              <-Reducer 19 [CUSTOM_SIMPLE_EDGE]
+                                              <-Reducer 18 [CUSTOM_SIMPLE_EDGE]
                                                 PARTITION_ONLY_SHUFFLE [RS_39]
                                                   Group By Operator [GBY_38] (rows=1 width=8)
                                                     Output:["_col0"],aggregations:["count()"]
                                                     Select Operator [SEL_36] (rows=9131 width=1119)
                                                       Group By Operator [GBY_35] (rows=9131 width=1119)
                                                         Output:["_col0"],keys:KEY._col0
-                                                      <-Map 18 [SIMPLE_EDGE]
+                                                      <-Map 14 [SIMPLE_EDGE]
                                                         SHUFFLE [RS_34]
                                                           PartitionCols:_col0
                                                           Group By Operator [GBY_33] (rows=18262 width=1119)
@@ -152,8 +152,7 @@ Stage-0
                                                               Output:["d_month_seq"]
                                                               Filter Operator [FIL_106] (rows=18262 width=1119)
                                                                 predicate:((d_year = 2000) and (d_moy = 2))
-                                                                TableScan [TS_30] (rows=73049 width=1119)
-                                                                  default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_month_seq","d_year","d_moy"]
+                                                                 Please refer to the previous TableScan [TS_23]
                             <-Reducer 4 [SIMPLE_EDGE]
                               SHUFFLE [RS_63]
                                 PartitionCols:_col6


[03/50] [abbrv] hive git commit: HIVE-16639. LLAP: Derive shuffle thread counts and keep-alive connections from instance count. (Siddharth Seth, reviewed by Gopal V)

Posted by we...@apache.org.
HIVE-16639. LLAP: Derive shuffle thread counts and keep-alive connections from instance count. (Siddharth Seth, reviewed by Gopal V)
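
The sizing logic introduced here is small enough to restate as a standalone sketch. The class and method names below are illustrative only (they are not Hive APIs); the rules they encode are the ones visible in the diffs that follow: shuffle handler threads default to three per available core, keep-alive connections are enabled by default, and the daemon's http.maxConnections is derived from the larger of the instance count and the executor count, plus one.

    // Illustrative sketch only -- not Hive code. Restates the sizing rules from the patch.
    public final class ShuffleSizingSketch {

      static int defaultMaxShuffleThreads() {
        // three handler threads per core, matching the new DEFAULT_MAX_SHUFFLE_THREADS
        return Runtime.getRuntime().availableProcessors() * 3;
      }

      static boolean defaultKeepAliveEnabled() {
        return true; // keep-alive connections are now on by default
      }

      static int httpMaxConnections(int instances, int executors) {
        // one connection per instance or executor (whichever is larger), plus one spare
        return Math.max(instances, executors) + 1;
      }

      public static void main(String[] args) {
        System.out.println("shuffle threads = " + defaultMaxShuffleThreads());
        System.out.println("-Dhttp.maxConnections=" + httpMaxConnections(10, 12));
      }
    }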


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3baaca74
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3baaca74
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3baaca74

Branch: refs/heads/hive-14535
Commit: 3baaca747d7bd81d58a8651074af5aed67067c6f
Parents: 113a099
Author: Siddharth Seth <ss...@HW10890.local>
Authored: Fri May 12 15:49:27 2017 -0700
Committer: Siddharth Seth <ss...@HW10890.local>
Committed: Fri May 12 15:49:27 2017 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java | 4 ++--
 llap-server/src/main/resources/package.py                      | 6 +++++-
 2 files changed, 7 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/3baaca74/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
index 085c977..0705225 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
@@ -171,7 +171,7 @@ public class ShuffleHandler implements AttemptRegistrationListener {
 
   public static final String SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED =
       "llap.shuffle.connection-keep-alive.enable";
-  public static final boolean DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED = false;
+  public static final boolean DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED = true;
 
   public static final String SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT =
       "llap.shuffle.connection-keep-alive.timeout";
@@ -194,7 +194,7 @@ public class ShuffleHandler implements AttemptRegistrationListener {
   
   public static final String MAX_SHUFFLE_THREADS = "llap.shuffle.max.threads";
   // 0 implies Netty default of 2 * number of available processors
-  public static final int DEFAULT_MAX_SHUFFLE_THREADS = 0;
+  public static final int DEFAULT_MAX_SHUFFLE_THREADS = Runtime.getRuntime().availableProcessors() * 3;
   
   public static final String SHUFFLE_BUFFER_SIZE = 
       "llap.shuffle.transfer.buffer.size";

http://git-wip-us.apache.org/repos/asf/hive/blob/3baaca74/llap-server/src/main/resources/package.py
----------------------------------------------------------------------
diff --git a/llap-server/src/main/resources/package.py b/llap-server/src/main/resources/package.py
index 8a378ef..e83d3b0 100644
--- a/llap-server/src/main/resources/package.py
+++ b/llap-server/src/main/resources/package.py
@@ -20,6 +20,7 @@ class LlapResource(object):
 		# convert to Mb
 		self.cache = config["hive.llap.io.memory.size"] / (1024*1024.0)
 		self.direct = config["hive.llap.io.allocator.direct"]
+		self.executors = config["hive.llap.daemon.num.executors"]
 		self.min_cores = -1
 		# compute heap + cache as final Xmx
 		h = self.memory 
@@ -129,10 +130,13 @@ def main(args):
 	config = json_parse(open(join(input, "config.json")).read())
 	java_home = config["java.home"]
 	max_direct_memory = config["max_direct_memory"]
+
+	resource = LlapResource(config)
+
 	daemon_args = args.args
 	if long(max_direct_memory) > 0:
 		daemon_args = " -XX:MaxDirectMemorySize=%s %s" % (max_direct_memory, daemon_args)
-	resource = LlapResource(config)
+	daemon_args = " -Dhttp.maxConnections=%s %s" % ((max(args.instances, resource.executors) + 1), daemon_args)
 	# 5% container failure every monkey_interval seconds
 	monkey_percentage = 5 # 5%
 	vars = {


[02/50] [abbrv] hive git commit: HIVE-16634. LLAP Use a pool of connections to a single AM from a daemon. (Siddharth Seth, reviewed by Sergey Shelukhin)

Posted by we...@apache.org.
HIVE-16634. LLAP Use a pool of connections to a single AM from a daemon.
(Siddharth Seth, reviewed by Sergey Shelukhin)
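
In outline, the change swaps a single shared umbilical UGI for a small pool that callers borrow from and return to, so several tasks in a daemon can each hold their own connection to the AM. A minimal sketch of that borrow/return pattern, using a generic placeholder type rather than the real UserGroupInformation/token plumbing:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.function.Supplier;

    // Illustrative only: a generic borrow/return pool in the spirit of the UGI pool
    // added to QueryInfo below. The real code pools UserGroupInformation instances
    // that carry the AM's job token.
    final class SimplePool<T> {
      private final BlockingQueue<T> idle = new LinkedBlockingQueue<>();
      private final Supplier<T> factory;

      SimplePool(Supplier<T> factory) {
        this.factory = factory;
      }

      T borrow() {
        T item = idle.poll();                        // reuse an idle instance if any
        return item != null ? item : factory.get();  // otherwise create a fresh one
      }

      void giveBack(T item) {
        idle.offer(item);                            // make it available to the next caller
      }
    }

The pool is unbounded and never blocks: a miss simply creates another instance, and instances handed back stay cached for reuse, which is also how the getUmbilicalUgi()/returnUmbilicalUgi() pair in the diff below behaves.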


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/113a0991
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/113a0991
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/113a0991

Branch: refs/heads/hive-14535
Commit: 113a099125fa0dae25bbb8417582edeeeed5aac3
Parents: 8a7b5b5
Author: Siddharth Seth <ss...@HW10890.local>
Authored: Fri May 12 14:48:46 2017 -0700
Committer: Siddharth Seth <ss...@HW10890.local>
Committed: Fri May 12 14:48:46 2017 -0700

----------------------------------------------------------------------
 .../hadoop/hive/llap/daemon/impl/QueryInfo.java | 44 +++++++++++---------
 .../hive/llap/daemon/impl/QueryTracker.java     |  4 +-
 .../llap/daemon/impl/TaskRunnerCallable.java    |  1 +
 .../daemon/impl/TaskExecutorTestHelpers.java    |  5 ++-
 4 files changed, 31 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/113a0991/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryInfo.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryInfo.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryInfo.java
index 088f07c..ce2f457 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryInfo.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryInfo.java
@@ -24,9 +24,10 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.locks.ReentrantLock;
 
 import com.google.common.base.Preconditions;
@@ -57,6 +58,8 @@ public class QueryInfo {
   private final FileSystem localFs;
   private String[] localDirs;
   private final LlapNodeId amNodeId;
+  private final String appTokenIdentifier;
+  private final Token<JobTokenIdentifier> appToken;
   // Map of states for different vertices.
 
   private final Set<QueryFragmentInfo> knownFragments =
@@ -66,14 +69,15 @@ public class QueryInfo {
 
   private final FinishableStateTracker finishableStateTracker = new FinishableStateTracker();
   private final String tokenUserName, appId;
-  private final AtomicReference<UserGroupInformation> umbilicalUgi;
 
   public QueryInfo(QueryIdentifier queryIdentifier, String appIdString, String dagIdString,
     String dagName, String hiveQueryIdString,
     int dagIdentifier, String user,
     ConcurrentMap<String, SourceStateProto> sourceStateMap,
     String[] localDirsBase, FileSystem localFs, String tokenUserName,
-    String tokenAppId, final LlapNodeId amNodeId) {
+    String tokenAppId, final LlapNodeId amNodeId,
+    String tokenIdentifier,
+    Token<JobTokenIdentifier> appToken) {
     this.queryIdentifier = queryIdentifier;
     this.appIdString = appIdString;
     this.dagIdString = dagIdString;
@@ -86,8 +90,12 @@ public class QueryInfo {
     this.localFs = localFs;
     this.tokenUserName = tokenUserName;
     this.appId = tokenAppId;
-    this.umbilicalUgi = new AtomicReference<>();
     this.amNodeId = amNodeId;
+    this.appTokenIdentifier = tokenIdentifier;
+    this.appToken = appToken;
+    final InetSocketAddress address =
+        NetUtils.createSocketAddrForHost(amNodeId.getHostname(), amNodeId.getPort());
+    SecurityUtil.setTokenService(appToken, address);
   }
 
   public QueryIdentifier getQueryIdentifier() {
@@ -314,23 +322,21 @@ public class QueryInfo {
     return appId;
   }
 
-  public void setupUmbilicalUgi(String umbilicalUser, Token<JobTokenIdentifier> appToken, String amHost, int amPort) {
-    synchronized (umbilicalUgi) {
-      if (umbilicalUgi.get() == null) {
-        UserGroupInformation taskOwner =
-            UserGroupInformation.createRemoteUser(umbilicalUser);
-        final InetSocketAddress address =
-            NetUtils.createSocketAddrForHost(amHost, amPort);
-        SecurityUtil.setTokenService(appToken, address);
-        taskOwner.addToken(appToken);
-        umbilicalUgi.set(taskOwner);
-      }
-    }
-  }
+
+  private final BlockingQueue<UserGroupInformation> ugiPool = new LinkedBlockingQueue<>();
 
   public UserGroupInformation getUmbilicalUgi() {
-    synchronized (umbilicalUgi) {
-      return umbilicalUgi.get();
+
+    UserGroupInformation ugi;
+    ugi = ugiPool.poll();
+    if (ugi == null) {
+      ugi = UserGroupInformation.createRemoteUser(appTokenIdentifier);
+      ugi.addToken(appToken);
     }
+    return ugi;
+  }
+
+  public void returnUmbilicalUgi(UserGroupInformation ugi) {
+    ugiPool.offer(ugi);
   }
 }
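
The expected calling pattern is borrow, use, then return in a finally block; the TaskRunnerCallable hunk further down does exactly this with returnUmbilicalUgi(). Sketched against the hypothetical SimplePool above:

    // Hypothetical caller, mirroring the borrow/use/return-in-finally shape of the patch.
    static void runWithPooledIdentity(SimplePool<String> pool) {
      String identity = pool.borrow();
      try {
        // ... issue umbilical RPCs to the AM under this identity ...
      } finally {
        pool.giveBack(identity);   // always return it, even if the call above failed
      }
    }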

http://git-wip-us.apache.org/repos/asf/hive/blob/113a0991/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
index 7e646c5..daeb555 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
@@ -169,13 +169,11 @@ public class QueryTracker extends AbstractService {
             new QueryInfo(queryIdentifier, appIdString, dagIdString, dagName, hiveQueryIdString,
                 dagIdentifier, user,
                 getSourceCompletionMap(queryIdentifier), localDirsBase, localFs,
-                tokenInfo.userName, tokenInfo.appId, amNodeId);
+                tokenInfo.userName, tokenInfo.appId, amNodeId, vertex.getTokenIdentifier(), appToken);
         QueryInfo old = queryInfoMap.putIfAbsent(queryIdentifier, queryInfo);
         if (old != null) {
           queryInfo = old;
         } else {
-          // Ensure the UGI is setup once.
-          queryInfo.setupUmbilicalUgi(vertex.getTokenIdentifier(), appToken, amNodeId.getHostname(), amNodeId.getPort());
           isExistingQueryInfo = false;
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/113a0991/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index 1669815..7d7fd23 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -274,6 +274,7 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
           return result;
         } finally {
           FileSystem.closeAllForUGI(fsTaskUgi);
+          fragmentInfo.getQueryInfo().returnUmbilicalUgi(taskOwner);
           LOG.info("ExecutionTime for Container: " + request.getContainerIdString() + "=" +
                   runtimeWatch.stop().elapsedMillis());
           if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/113a0991/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
index 6287ae8..27c426c 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
@@ -23,6 +23,7 @@ import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.llap.LlapNodeId;
 import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler;
 import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler;
 import org.apache.hadoop.hive.llap.daemon.SchedulerFragmentCompletingListener;
@@ -34,6 +35,7 @@ import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrB
 import org.apache.hadoop.hive.llap.metrics.LlapDaemonExecutorMetrics;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.tez.dag.records.TezDAGID;
 import org.apache.tez.dag.records.TezVertexID;
 import org.apache.tez.hadoop.shim.DefaultHadoopShim;
@@ -89,11 +91,12 @@ public class TaskExecutorTestHelpers {
 
   public static QueryInfo createQueryInfo() {
     QueryIdentifier queryIdentifier = new QueryIdentifier("fake_app_id_string", 1);
+    LlapNodeId nodeId = LlapNodeId.getInstance("localhost", 0);
     QueryInfo queryInfo =
         new QueryInfo(queryIdentifier, "fake_app_id_string", "fake_dag_id_string", "fake_dag_name",
             "fakeHiveQueryId", 1, "fakeUser",
             new ConcurrentHashMap<String, LlapDaemonProtocolProtos.SourceStateProto>(),
-            new String[0], null, "fakeUser", null, null);
+            new String[0], null, "fakeUser", null, nodeId, null, null);
     return queryInfo;
   }