Posted to commits@hive.apache.org by na...@apache.org on 2012/08/14 06:57:42 UTC

svn commit: r1372727 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/optimizer/ test/queries/clientpositive/ test/results/clientpositive/

Author: namit
Date: Tue Aug 14 04:57:41 2012
New Revision: 1372727

URL: http://svn.apache.org/viewvc?rev=1372727&view=rev
Log:
HIVE-3343 Hive: Query returns misaligned results for Group by followed by Join with filter
and skips a group-by result (Gang Tim Liu via namit)


Added:
    hive/trunk/ql/src/test/queries/clientpositive/ppd_join_filter.q
    hive/trunk/ql/src/test/results/clientpositive/ppd_join_filter.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1372727&r1=1372726&r2=1372727&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Tue Aug 14 04:57:41 2012
@@ -87,8 +87,11 @@ public final class ColumnPrunerProcFacto
       // get list of columns used in the filter
       List<String> cl = condn.getCols();
       // merge it with the downstream col list
+      List<String> filterOpPrunedColLists = Utilities.mergeUniqElems(cppCtx.genColLists(op), cl);
+      List<String> filterOpPrunedColListsOrderPreserved = preserveColumnOrder(op,
+          filterOpPrunedColLists);
       cppCtx.getPrunedColLists().put(op,
-          Utilities.mergeUniqElems(cppCtx.genColLists(op), cl));
+          filterOpPrunedColListsOrderPreserved);
 
       pruneOperator(cppCtx, op, cppCtx.getPrunedColLists().get(op));
 
@@ -591,6 +594,32 @@ public final class ColumnPrunerProcFacto
     }
   }
 
+  /**
+   * Pruning must preserve the order of columns in the operator's input
+   * schema; otherwise downstream operators read values by position and
+   * can return misaligned rows.
+   * @param op the operator whose input schema defines the column order
+   * @param cols the pruned column list, possibly out of schema order
+   * @return the pruned columns reordered to match the input schema
+   * @throws SemanticException
+   */
+  private static List<String> preserveColumnOrder(Operator<? extends Serializable> op,
+      List<String> cols)
+      throws SemanticException {
+    RowSchema inputSchema = op.getSchema();
+    if (inputSchema != null) {
+      ArrayList<String> rs = new ArrayList<String>();
+      ArrayList<ColumnInfo> inputCols = inputSchema.getSignature();
+      for (ColumnInfo i: inputCols) {
+        if (cols.contains(i.getInternalName())) {
+          rs.add(i.getInternalName());
+        }
+      }
+      return rs;
+    } else {
+      return cols;
+    }
+  }
+
+
   private static void pruneJoinOperator(NodeProcessorCtx ctx,
       CommonJoinOperator op, JoinDesc conf,
       Map<String, ExprNodeDesc> columnExprMap,

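[Editorial note] The root cause, in short: Utilities.mergeUniqElems appends the
filter's columns onto the downstream pruned-column list, so the merged list can
end up out of input-schema order, and downstream operators that read values by
position then see misaligned rows. Below is a minimal, self-contained sketch of
the order-preserving step; the class and variable names are hypothetical and
not part of the patch.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class PreserveOrderSketch {

      // Keep only the pruned columns, in the order the input schema lists them.
      static List<String> preserveColumnOrder(List<String> schemaCols,
          List<String> prunedCols) {
        List<String> ordered = new ArrayList<String>();
        for (String col : schemaCols) {
          if (prunedCols.contains(col)) {
            ordered.add(col);
          }
        }
        return ordered;
      }

      public static void main(String[] args) {
        // Input schema order, as RowSchema.getSignature() would report it.
        List<String> schema = Arrays.asList("_col1", "_col2", "_col3", "_col4");
        // A merged pruned list as mergeUniqElems can produce it: the filter's
        // column (_col4) was appended, leaving the list out of schema order
        // (mirroring the lateral_view_ppd.q.out change below).
        List<String> pruned = Arrays.asList("_col1", "_col4", "_col2", "_col3");
        System.out.println(preserveColumnOrder(schema, pruned));
        // prints [_col1, _col2, _col3, _col4]
      }
    }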
Added: hive/trunk/ql/src/test/queries/clientpositive/ppd_join_filter.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ppd_join_filter.q?rev=1372727&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ppd_join_filter.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ppd_join_filter.q Tue Aug 14 04:57:41 2012
@@ -0,0 +1,116 @@
+set hive.optimize.ppd=true;
+set hive.ppd.remove.duplicatefilters=false;
+
+explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+set hive.optimize.ppd=true;
+set hive.ppd.remove.duplicatefilters=true;
+
+explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+set hive.optimize.ppd=false;
+set hive.ppd.remove.duplicatefilters=false;
+
+explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+set hive.optimize.ppd=false;
+set hive.ppd.remove.duplicatefilters=true;
+
+explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+
+select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5;
+

Modified: hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out?rev=1372727&r1=1372726&r2=1372727&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out Tue Aug 14 04:57:41 2012
@@ -233,13 +233,13 @@ STAGE PLANS:
                     expressions:
                           expr: _col1
                           type: string
-                          expr: _col4
-                          type: int
                           expr: _col2
                           type: string
                           expr: _col3
                           type: string
-                    outputColumnNames: _col1, _col4, _col2, _col3
+                          expr: _col4
+                          type: int
+                    outputColumnNames: _col1, _col2, _col3, _col4
                     Select Operator
                       expressions:
                             expr: _col1
@@ -267,13 +267,13 @@ STAGE PLANS:
                       expressions:
                             expr: _col1
                             type: string
-                            expr: _col4
-                            type: int
                             expr: _col2
                             type: string
                             expr: _col3
                             type: string
-                      outputColumnNames: _col1, _col4, _col2, _col3
+                            expr: _col4
+                            type: int
+                      outputColumnNames: _col1, _col2, _col3, _col4
                       Select Operator
                         expressions:
                               expr: _col1
@@ -348,13 +348,13 @@ STAGE PLANS:
                           outputColumnNames: _col0, _col1, _col2, _col3
                           Select Operator
                             expressions:
+                                  expr: _col0
+                                  type: string
                                   expr: _col1
                                   type: string
                                   expr: _col2
                                   type: int
-                                  expr: _col0
-                                  type: string
-                            outputColumnNames: _col1, _col2, _col0
+                            outputColumnNames: _col0, _col1, _col2
                             Select Operator
                               expressions:
                                     expr: _col1
@@ -379,13 +379,13 @@ STAGE PLANS:
                             outputColumnNames: _col0, _col1, _col2, _col3
                             Select Operator
                               expressions:
+                                    expr: _col0
+                                    type: string
                                     expr: _col1
                                     type: string
                                     expr: _col2
                                     type: int
-                                    expr: _col0
-                                    type: string
-                              outputColumnNames: _col1, _col2, _col0
+                              outputColumnNames: _col0, _col1, _col2
                               Select Operator
                                 expressions:
                                       expr: _col1
@@ -415,13 +415,13 @@ STAGE PLANS:
                             outputColumnNames: _col0, _col1, _col2, _col3
                             Select Operator
                               expressions:
+                                    expr: _col0
+                                    type: string
                                     expr: _col1
                                     type: string
                                     expr: _col2
                                     type: int
-                                    expr: _col0
-                                    type: string
-                              outputColumnNames: _col1, _col2, _col0
+                              outputColumnNames: _col0, _col1, _col2
                               Select Operator
                                 expressions:
                                       expr: _col1
@@ -446,13 +446,13 @@ STAGE PLANS:
                               outputColumnNames: _col0, _col1, _col2, _col3
                               Select Operator
                                 expressions:
+                                      expr: _col0
+                                      type: string
                                       expr: _col1
                                       type: string
                                       expr: _col2
                                       type: int
-                                      expr: _col0
-                                      type: string
-                                outputColumnNames: _col1, _col2, _col0
+                                outputColumnNames: _col0, _col1, _col2
                                 Select Operator
                                   expressions:
                                         expr: _col1

Added: hive/trunk/ql/src/test/results/clientpositive/ppd_join_filter.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ppd_join_filter.q.out?rev=1372727&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ppd_join_filter.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ppd_join_filter.q.out Tue Aug 14 04:57:41 2012
@@ -0,0 +1,1308 @@
+PREHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) k) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 1) k1) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 2) k2) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 3) k3)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) b) (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (< (. (TOK_TABLE_OR_COL b) k1) 5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k3)))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: key
+              Group By Operator
+                aggregations:
+                      expr: min(key)
+                bucketGroup: false
+                keys:
+                      expr: key
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: min(VALUE._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: (_col1 + 1)
+                  type: double
+                  expr: (_col1 + 2)
+                  type: double
+                  expr: (_col1 + 3)
+                  type: double
+            outputColumnNames: _col0, _col2, _col3, _col4
+            Filter Operator
+              isSamplingPred: false
+              predicate:
+                  expr: (_col2 < 5.0)
+                  type: boolean
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col0,_col2,_col3,_col4
+                      columns.types string,double,double,double
+                      escape.delim \
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+            Reduce Output Operator
+              key expressions:
+                    expr: _col0
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col0
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: _col3
+                    type: double
+                    expr: _col4
+                    type: double
+        a 
+          TableScan
+            alias: a
+            GatherStats: false
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+      Needs Tagging: true
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col2,_col3,_col4
+              columns.types string,double,double,double
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col2,_col3,_col4
+                columns.types string,double,double,double
+                escape.delim \
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col3} {VALUE._col4}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col7, _col8
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col7
+                  type: double
+                  expr: _col8
+                  type: double
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2
+                    columns.types string:double:double
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	2.0	3.0
+0	2.0	3.0
+0	2.0	3.0
+2	4.0	5.0
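[Editorial note] A quick sanity check on these four rows (not part of the
committed output): the subquery groups src by key, so within each group
min(key) equals key, and the join filter b.k1 < 5 keeps only src keys 0
(which appears three times in src) and 2. A hypothetical stand-alone check:

    import java.util.Arrays;
    import java.util.List;

    public class ExpectedRowsSketch {
      public static void main(String[] args) {
        // Matching src keys: 0 appears three times, 2 once; all other keys
        // fail the filter k1 = min(key) + 1 < 5.
        List<Integer> keys = Arrays.asList(0, 0, 0, 2);
        for (int key : keys) {
          double k2 = key + 2; // min(key) + 2, and min(key) == key per group
          double k3 = key + 3; // min(key) + 3
          System.out.println(key + "\t" + k2 + "\t" + k3);
        }
      }
    }

This prints exactly the four rows above.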
+PREHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) k) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 1) k1) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 2) k2) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 3) k3)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) b) (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (< (. (TOK_TABLE_OR_COL b) k1) 5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k3)))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: key
+              Group By Operator
+                aggregations:
+                      expr: min(key)
+                bucketGroup: false
+                keys:
+                      expr: key
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: min(VALUE._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: (_col1 + 1)
+                  type: double
+                  expr: (_col1 + 2)
+                  type: double
+                  expr: (_col1 + 3)
+                  type: double
+            outputColumnNames: _col0, _col2, _col3, _col4
+            Filter Operator
+              isSamplingPred: false
+              predicate:
+                  expr: (_col2 < 5.0)
+                  type: boolean
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col0,_col2,_col3,_col4
+                      columns.types string,double,double,double
+                      escape.delim \
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+            Reduce Output Operator
+              key expressions:
+                    expr: _col0
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col0
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: _col3
+                    type: double
+                    expr: _col4
+                    type: double
+        a 
+          TableScan
+            alias: a
+            GatherStats: false
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+      Needs Tagging: true
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col2,_col3,_col4
+              columns.types string,double,double,double
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col2,_col3,_col4
+                columns.types string,double,double,double
+                escape.delim \
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col3} {VALUE._col4}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col7, _col8
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col7
+                  type: double
+                  expr: _col8
+                  type: double
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2
+                    columns.types string:double:double
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	2.0	3.0
+0	2.0	3.0
+0	2.0	3.0
+2	4.0	5.0
+PREHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) k) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 1) k1) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 2) k2) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 3) k3)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) b) (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (< (. (TOK_TABLE_OR_COL b) k1) 5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k3)))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: key
+              Group By Operator
+                aggregations:
+                      expr: min(key)
+                bucketGroup: false
+                keys:
+                      expr: key
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: min(VALUE._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: (_col1 + 1)
+                  type: double
+                  expr: (_col1 + 2)
+                  type: double
+                  expr: (_col1 + 3)
+                  type: double
+            outputColumnNames: _col0, _col2, _col3, _col4
+            Filter Operator
+              isSamplingPred: false
+              predicate:
+                  expr: (_col2 < 5.0)
+                  type: boolean
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col0,_col2,_col3,_col4
+                      columns.types string,double,double,double
+                      escape.delim \
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+            Reduce Output Operator
+              key expressions:
+                    expr: _col0
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col0
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: _col3
+                    type: double
+                    expr: _col4
+                    type: double
+        a 
+          TableScan
+            alias: a
+            GatherStats: false
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+      Needs Tagging: true
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col2,_col3,_col4
+              columns.types string,double,double,double
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col2,_col3,_col4
+                columns.types string,double,double,double
+                escape.delim \
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col3} {VALUE._col4}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col7, _col8
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col7
+                  type: double
+                  expr: _col8
+                  type: double
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2
+                    columns.types string:double:double
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	2.0	3.0
+0	2.0	3.0
+0	2.0	3.0
+2	4.0	5.0
+PREHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) k) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 1) k1) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 2) k2) (TOK_SELEXPR (+ (TOK_FUNCTION min (TOK_TABLE_OR_COL key)) 3) k3)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) b) (and (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (< (. (TOK_TABLE_OR_COL b) k1) 5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) k3)))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: key
+              Group By Operator
+                aggregations:
+                      expr: min(key)
+                bucketGroup: false
+                keys:
+                      expr: key
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: min(VALUE._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: (_col1 + 1)
+                  type: double
+                  expr: (_col1 + 2)
+                  type: double
+                  expr: (_col1 + 3)
+                  type: double
+            outputColumnNames: _col0, _col2, _col3, _col4
+            Filter Operator
+              isSamplingPred: false
+              predicate:
+                  expr: (_col2 < 5.0)
+                  type: boolean
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col0,_col2,_col3,_col4
+                      columns.types string,double,double,double
+                      escape.delim \
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+            Reduce Output Operator
+              key expressions:
+                    expr: _col0
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col0
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: _col3
+                    type: double
+                    expr: _col4
+                    type: double
+        a 
+          TableScan
+            alias: a
+            GatherStats: false
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+      Needs Tagging: true
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col2,_col3,_col4
+              columns.types string,double,double,double
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col2,_col3,_col4
+                columns.types string,double,double,double
+                escape.delim \
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col3} {VALUE._col4}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col7, _col8
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col7
+                  type: double
+                  expr: _col8
+                  type: double
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2
+                    columns.types string:double:double
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, b.k2, b.k3
+from src a
+join (
+select key,
+min(key) as k,
+min(key)+1 as k1,
+min(key)+2 as k2,
+min(key)+3 as k3
+from src
+group by key
+) b
+on a.key=b.key and b.k1 < 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	2.0	3.0
+0	2.0	3.0
+0	2.0	3.0
+2	4.0	5.0