Posted to commits@hive.apache.org by jd...@apache.org on 2015/10/15 19:50:12 UTC

hive git commit: HIVE-11892: UDTF run in local fetch task does not return rows forwarded during GenericUDTF.close() (Jason Dere, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 3dc6ec9cb -> 370a807d5


HIVE-11892: UDTF run in local fetch task does not return rows forwarded during GenericUDTF.close() (Jason Dere, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/370a807d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/370a807d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/370a807d

Branch: refs/heads/master
Commit: 370a807d5261de1105c912bc69b58a4bd3711957
Parents: 3dc6ec9
Author: Jason Dere <jd...@hortonworks.com>
Authored: Thu Oct 15 10:48:47 2015 -0700
Committer: Jason Dere <jd...@hortonworks.com>
Committed: Thu Oct 15 10:48:47 2015 -0700

----------------------------------------------------------------------
 .../hive/ql/optimizer/SimpleFetchOptimizer.java |   3 +-
 .../queries/clientpositive/udtf_nofetchtask.q   |  10 +
 .../clientpositive/lateral_view_noalias.q.out   | 120 +++++++----
 .../results/clientpositive/nonmr_fetch.q.out    |  78 ++++---
 .../clientpositive/select_dummy_source.q.out    |  38 ++--
 .../clientpositive/tez/explainuser_1.q.out      |  21 +-
 .../clientpositive/tez/explainuser_3.q.out      |  55 +++--
 .../tez/select_dummy_source.q.out               |  76 +++++--
 .../results/clientpositive/udf_explode.q.out    | 206 ++++++++++++++++---
 .../results/clientpositive/udf_inline.q.out     |  42 ++--
 .../results/clientpositive/udtf_explode.q.out   | 149 +++++++++++---
 .../clientpositive/udtf_nofetchtask.q.out       |  30 +++
 12 files changed, 621 insertions(+), 207 deletions(-)
----------------------------------------------------------------------
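
For context on the bug: a UDTF may buffer its input and forward its output rows only from GenericUDTF.close(), after the last input row has been processed. The new test exercises exactly such a UDTF, the contrib class GenericUDTFCount2. The sketch below is an illustrative approximation of that pattern, not the shipped contrib source:

import java.util.ArrayList;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Counts its input rows and forwards the count (twice) only from close().
// If the execution path never drains rows forwarded during close(), a query
// over this UDTF produces no output at all -- the behavior this commit fixes.
public class CountOnCloseUDTF extends GenericUDTF {
  private int count = 0;
  private final Object[] forwardObj = new Object[1];

  @Override
  public StructObjectInspector initialize(ObjectInspector[] argOIs)
      throws UDFArgumentException {
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("col1");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
  }

  @Override
  public void process(Object[] args) throws HiveException {
    count++;                      // no rows are forwarded per input row
  }

  @Override
  public void close() throws HiveException {
    forwardObj[0] = Integer.valueOf(count);
    forward(forwardObj);          // all output is produced here,
    forward(forwardObj);          // after the last input row
  }
}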


http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
index af74fff..2af6f9a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.exec.ScriptOperator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.UDTFOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
@@ -283,7 +284,7 @@ public class SimpleFetchOptimizer implements Transform {
 
   private boolean isConvertible(FetchData fetch, Operator<?> operator, Set<Operator<?>> traversed) {
     if (operator instanceof ReduceSinkOperator || operator instanceof CommonJoinOperator
-        || operator instanceof ScriptOperator) {
+        || operator instanceof ScriptOperator || operator instanceof UDTFOperator) {
       return false;
     }
 

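Design note: rather than teaching the local fetch path to drain rows forwarded during GenericUDTF.close(), the fix takes the conservative route of excluding UDTFOperator from fetch-task conversion altogether, the same treatment isConvertible() already gives ReduceSinkOperator, CommonJoinOperator, and ScriptOperator. The golden-file churn below all follows from that one-line change: plans that previously ran entirely inside a fetch-only Stage-0 now gain a Map Reduce (or Tez) Stage-1 that materializes the UDTF output through a File Output Operator, leaving Stage-0 as a bare ListSink fetch.
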
http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/queries/clientpositive/udtf_nofetchtask.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/udtf_nofetchtask.q b/ql/src/test/queries/clientpositive/udtf_nofetchtask.q
new file mode 100644
index 0000000..f64b12f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udtf_nofetchtask.q
@@ -0,0 +1,10 @@
+create temporary function udtfCount2 as 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2';
+
+set hive.fetch.task.conversion=minimal;
+-- Correct output should be 2 rows
+select udtfCount2() from src;
+
+set hive.fetch.task.conversion=more;
+-- Should still have the same output with fetch task conversion enabled
+select udtfCount2() from src;
+
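
The test pairs the two hive.fetch.task.conversion settings deliberately: with "minimal" the query runs as a regular MapReduce job (the known-good path), while with "more" the optimizer would previously have converted it to a local fetch task and dropped the close()-time rows. Since every row GenericUDTFCount2 emits comes from close(), the broken conversion presumably returned no output at all; after the fix both runs should print the row count twice.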

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/lateral_view_noalias.q.out b/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
index 90c9155..7988bd7 100644
--- a/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
+++ b/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
@@ -5,38 +5,20 @@ POSTHOOK: query: --HIVE-2608 Do not require AS a,b,c part in LATERAL VIEW
 EXPLAIN SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 2
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Lateral View Forward
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              Statistics: Num rows: 500 Data size: 172000 Basic stats: COMPLETE Column stats: COMPLETE
-              Lateral View Join Operator
-                outputColumnNames: _col5, _col6
-                Statistics: Num rows: 1000 Data size: 364000 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col5 (type: string), _col6 (type: int)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1000 Data size: 364000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Limit
-                    Number of rows: 2
-                    Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
-                    ListSink
-            Select Operator
-              expressions: map('key1':100,'key2':200) (type: map<string,int>)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
-              UDTF Operator
-                Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
-                function name: explode
+            Lateral View Forward
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                Statistics: Num rows: 500 Data size: 172000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5, _col6
                   Statistics: Num rows: 1000 Data size: 364000 Basic stats: COMPLETE Column stats: COMPLETE
@@ -47,7 +29,43 @@ STAGE PLANS:
                     Limit
                       Number of rows: 2
                       Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
-                      ListSink
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              Select Operator
+                expressions: map('key1':100,'key2':200) (type: map<string,int>)
+                outputColumnNames: _col0
+                Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                UDTF Operator
+                  Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                  function name: explode
+                  Lateral View Join Operator
+                    outputColumnNames: _col5, _col6
+                    Statistics: Num rows: 1000 Data size: 364000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col5 (type: string), _col6 (type: int)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1000 Data size: 364000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Limit
+                        Number of rows: 2
+                        Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 2
+      Processor Tree:
+        ListSink
 
 PREHOOK: query: SELECT myTab.* from src LATERAL VIEW explode(map('key1', 100, 'key2', 200)) myTab limit 2
 PREHOOK: type: QUERY
@@ -64,27 +82,39 @@ PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: map('key1':100,'key2':200) (type: map<string,int>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: explode
+                Limit
+                  Number of rows: 2
+                  Statistics: Num rows: 2 Data size: 768 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2 Data size: 768 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: 2
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: map('key1':100,'key2':200) (type: map<string,int>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: explode
-              Limit
-                Number of rows: 2
-                Statistics: Num rows: 2 Data size: 768 Basic stats: COMPLETE Column stats: COMPLETE
-                ListSink
+        ListSink
 
 PREHOOK: query: SELECT explode(map('key1', 100, 'key2', 200)) from src limit 2
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/nonmr_fetch.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/nonmr_fetch.q.out b/ql/src/test/results/clientpositive/nonmr_fetch.q.out
index 9652d7e..d8c6da6 100644
--- a/ql/src/test/results/clientpositive/nonmr_fetch.q.out
+++ b/ql/src/test/results/clientpositive/nonmr_fetch.q.out
@@ -841,40 +841,22 @@ explain
 select key,X from srcpart lateral view explode(array(key,value)) L as x where (ds='2008-04-08' AND hr='11') limit 20
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 20
-      Processor Tree:
-        TableScan
-          alias: srcpart
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-          Lateral View Forward
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: key
+            Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Lateral View Join Operator
-                outputColumnNames: _col0, _col7
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), _col7 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                  Limit
-                    Number of rows: 20
-                    Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
-                    ListSink
-            Select Operator
-              expressions: array(key,value) (type: array<string>)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              UDTF Operator
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: key
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                function name: explode
                 Lateral View Join Operator
                   outputColumnNames: _col0, _col7
                   Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
@@ -885,7 +867,43 @@ STAGE PLANS:
                     Limit
                       Number of rows: 20
                       Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
-                      ListSink
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              Select Operator
+                expressions: array(key,value) (type: array<string>)
+                outputColumnNames: _col0
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                UDTF Operator
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  function name: explode
+                  Lateral View Join Operator
+                    outputColumnNames: _col0, _col7
+                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), _col7 (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                      Limit
+                        Number of rows: 20
+                        Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 20 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 20
+      Processor Tree:
+        ListSink
 
 PREHOOK: query: select key,X from srcpart lateral view explode(array(key,value)) L as x where (ds='2008-04-08' AND hr='11') limit 20
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/select_dummy_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/select_dummy_source.q.out b/ql/src/test/results/clientpositive/select_dummy_source.q.out
index f04bf75..66cd013 100644
--- a/ql/src/test/results/clientpositive/select_dummy_source.q.out
+++ b/ql/src/test/results/clientpositive/select_dummy_source.q.out
@@ -190,25 +190,37 @@ POSTHOOK: query: explain
 select explode(array('a', 'b'))
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: _dummy_table
+            Row Limit Per Split: 1
+            Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: array('a','b') (type: array<string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        TableScan
-          alias: _dummy_table
-          Row Limit Per Split: 1
-          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: array('a','b') (type: array<string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
-              function name: explode
-              ListSink
+        ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
index 4b8ec3f..ec434f0 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
@@ -8050,12 +8050,21 @@ Plan not optimized by CBO due to missing feature [Others].
 Stage-0
    Fetch Operator
       limit:-1
-      UDTF Operator [UDTF_2]
-         function name:explode
-         Select Operator [SEL_1]
-            outputColumnNames:["_col0"]
-            TableScan [TS_0]
-               alias:_dummy_table
+      Stage-1
+         Map 1
+         File Output Operator [FS_3]
+            compressed:true
+            Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
+            UDTF Operator [UDTF_2]
+               function name:explode
+               Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+               Select Operator [SEL_1]
+                  outputColumnNames:["_col0"]
+                  Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                  TableScan [TS_0]
+                     alias:_dummy_table
+                     Statistics:Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
 
 PREHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 79c7116..4f69b3b 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -9,24 +9,43 @@ Plan not optimized by CBO.
 Stage-0
    Fetch Operator
       limit:-1
-      Select Operator [SEL_6]
-         outputColumnNames:["_col0","_col1"]
-         Lateral View Join Operator [LVJ_5]
-            outputColumnNames:["_col0","_col1","_col7"]
-            Select Operator [SEL_2]
-               outputColumnNames:["key","value"]
-               Lateral View Forward [LVF_1]
-                  TableScan [TS_0]
-                     alias:srcpart
-      Select Operator [SEL_6]
-         outputColumnNames:["_col0","_col1"]
-         Lateral View Join Operator [LVJ_5]
-            outputColumnNames:["_col0","_col1","_col7"]
-            UDTF Operator [UDTF_4]
-               function name:explode
-               Select Operator [SEL_3]
-                  outputColumnNames:["_col0"]
-                   Please refer to the previous Lateral View Forward [LVF_1]
+      Stage-1
+         Map 1
+         File Output Operator [FS_7]
+            compressed:false
+            Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
+            Select Operator [SEL_6]
+               outputColumnNames:["_col0","_col1"]
+               Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+               Lateral View Join Operator [LVJ_5]
+                  outputColumnNames:["_col0","_col1","_col7"]
+                  Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator [SEL_2]
+                     outputColumnNames:["key","value"]
+                     Statistics:Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                     Lateral View Forward [LVF_1]
+                        Statistics:Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                        TableScan [TS_0]
+                           alias:srcpart
+                           Statistics:Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+         File Output Operator [FS_7]
+            compressed:false
+            Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
+            Select Operator [SEL_6]
+               outputColumnNames:["_col0","_col1"]
+               Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+               Lateral View Join Operator [LVJ_5]
+                  outputColumnNames:["_col0","_col1","_col7"]
+                  Statistics:Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+                  UDTF Operator [UDTF_4]
+                     function name:explode
+                     Statistics:Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                     Select Operator [SEL_3]
+                        outputColumnNames:["_col0"]
+                        Statistics:Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                         Please refer to the previous Lateral View Forward [LVF_1]
 
 PREHOOK: query: explain show tables
 PREHOOK: type: SHOWTABLES

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out b/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
index fa99b76..112cb21 100644
--- a/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
+++ b/ql/src/test/results/clientpositive/tez/select_dummy_source.q.out
@@ -71,22 +71,40 @@ explain
 select explode(array('a', 'b'))
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: array('a','b') (type: array<string>)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                    UDTF Operator
+                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                      function name: explode
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        TableScan
-          alias: _dummy_table
-          Row Limit Per Split: 1
-          Select Operator
-            expressions: array('a','b') (type: array<string>)
-            outputColumnNames: _col0
-            UDTF Operator
-              function name: explode
-              ListSink
+        ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY
@@ -167,22 +185,40 @@ POSTHOOK: query: explain
 select explode(array('a', 'b'))
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: array('a','b') (type: array<string>)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                    UDTF Operator
+                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                      function name: explode
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        TableScan
-          alias: _dummy_table
-          Row Limit Per Split: 1
-          Select Operator
-            expressions: array('a','b') (type: array<string>)
-            outputColumnNames: _col0
-            UDTF Operator
-              function name: explode
-              ListSink
+        ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/udf_explode.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_explode.q.out b/ql/src/test/results/clientpositive/udf_explode.q.out
index 21f5aa2..ee38a2e 100644
--- a/ql/src/test/results/clientpositive/udf_explode.q.out
+++ b/ql/src/test/results/clientpositive/udf_explode.q.out
@@ -39,26 +39,101 @@ TOK_QUERY
 
 
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Row Limit Per Split: 1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            GatherStats: false
+            Select Operator
+              expressions: array(1,2,3) (type: array<int>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns col
+                        columns.types int
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        TableScan
-          alias: src
-          Row Limit Per Split: 1
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          GatherStats: false
-          Select Operator
-            expressions: array(1,2,3) (type: array<int>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: explode
-              ListSink
+        ListSink
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol
 PREHOOK: type: QUERY
@@ -300,26 +375,101 @@ TOK_QUERY
 
 
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Row Limit Per Split: 1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            GatherStats: false
+            Select Operator
+              expressions: map(1:'one',2:'two',3:'three') (type: map<int,string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: explode
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns key,value
+                        columns.types int:string
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        TableScan
-          alias: src
-          Row Limit Per Split: 1
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          GatherStats: false
-          Select Operator
-            expressions: map(1:'one',2:'two',3:'three') (type: map<int,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: explode
-              ListSink
+        ListSink
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/udf_inline.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_inline.q.out b/ql/src/test/results/clientpositive/udf_inline.q.out
index f986abf..076cef9 100644
--- a/ql/src/test/results/clientpositive/udf_inline.q.out
+++ b/ql/src/test/results/clientpositive/udf_inline.q.out
@@ -20,27 +20,39 @@ POSTHOOK: query: explain SELECT inline(
 )  as (id, text) FROM SRC limit 2
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: array(const struct(1,'dude!'),const struct(2,'Wheres'),const struct(3,'my car?')) (type: array<struct<col1:int,col2:string>>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 32000 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 32000 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: inline
+                Limit
+                  Number of rows: 2
+                  Statistics: Num rows: 2 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: 2
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: array(const struct(1,'dude!'),const struct(2,'Wheres'),const struct(3,'my car?')) (type: array<struct<col1:int,col2:string>>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 32000 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 32000 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: inline
-              Limit
-                Number of rows: 2
-                Statistics: Num rows: 2 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE
-                ListSink
+        ListSink
 
 PREHOOK: query: SELECT inline( 
   ARRAY(

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/udtf_explode.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udtf_explode.q.out b/ql/src/test/results/clientpositive/udtf_explode.q.out
index 2902df9..fa67a56 100644
--- a/ql/src/test/results/clientpositive/udtf_explode.q.out
+++ b/ql/src/test/results/clientpositive/udtf_explode.q.out
@@ -38,28 +38,103 @@ TOK_QUERY
 
 
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            GatherStats: false
+            Select Operator
+              expressions: array(1,2,3) (type: array<int>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: explode
+                Limit
+                  Number of rows: 3
+                  Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          columns col
+                          columns.types int
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
   Stage: Stage-0
     Fetch Operator
       limit: 3
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          GatherStats: false
-          Select Operator
-            expressions: array(1,2,3) (type: array<int>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: explode
-              Limit
-                Number of rows: 3
-                Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
-                ListSink
+        ListSink
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol
 PREHOOK: type: QUERY
@@ -336,27 +411,39 @@ PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: map(1:'one',2:'two',3:'three') (type: map<int,string>)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
+              UDTF Operator
+                Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
+                function name: explode
+                Limit
+                  Number of rows: 3
+                  Statistics: Num rows: 3 Data size: 1557 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 3 Data size: 1557 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: 3
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: map(1:'one',2:'two',3:'three') (type: map<int,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
-            UDTF Operator
-              Statistics: Num rows: 500 Data size: 259500 Basic stats: COMPLETE Column stats: COMPLETE
-              function name: explode
-              Limit
-                Number of rows: 3
-                Statistics: Num rows: 3 Data size: 1557 Basic stats: COMPLETE Column stats: COMPLETE
-                ListSink
+        ListSink
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT a.myKey, a.myVal, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3) a GROUP BY a.myKey, a.myVal
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/370a807d/ql/src/test/results/clientpositive/udtf_nofetchtask.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udtf_nofetchtask.q.out b/ql/src/test/results/clientpositive/udtf_nofetchtask.q.out
new file mode 100644
index 0000000..86929ea
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udtf_nofetchtask.q.out
@@ -0,0 +1,30 @@
+PREHOOK: query: create temporary function udtfCount2 as 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: udtfcount2
+POSTHOOK: query: create temporary function udtfCount2 as 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: udtfcount2
+PREHOOK: query: -- Correct output should be 2 rows
+select udtfCount2() from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Correct output should be 2 rows
+select udtfCount2() from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+500
+500
+PREHOOK: query: -- Should still have the same output with fetch task conversion enabled
+select udtfCount2() from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Should still have the same output with fetch task conversion enabled
+select udtfCount2() from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+500
+500
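
Reading the golden file: src contains 500 rows, and GenericUDTFCount2 forwards that count twice from close(), so two rows of 500 is the expected result under both conversion settings. To reproduce locally, the usual single-qfile invocation is along the lines of mvn test -Dtest=TestCliDriver -Dqfile=udtf_nofetchtask.q (exact module and flags vary by branch; see the Hive developer FAQ).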