Posted to commits@hive.apache.org by na...@apache.org on 2012/12/06 07:16:52 UTC

svn commit: r1417743 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientpositive/

Author: namit
Date: Thu Dec  6 06:16:51 2012
New Revision: 1417743

URL: http://svn.apache.org/viewvc?rev=1417743&view=rev
Log:
HIVE-2723: Should throw an "Ambiguous column reference key" exception for a particular
join condition (Navis via namit)



Added:
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q
    hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col_patterned.q.out
    hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1417743&r1=1417742&r2=1417743&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Thu Dec  6 06:16:51 2012
@@ -29,6 +29,7 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 
@@ -165,6 +166,17 @@ public class RowResolver implements Seri
     return ret;
   }
 
+  /**
+   * Checks whether the column alias already exists in this RowResolver under a different table alias.
+   */
+  public void checkColumn(String tableAlias, String columnAlias) throws SemanticException {
+    ColumnInfo prev = get(null, columnAlias);
+    if (prev != null &&
+        (tableAlias == null || !tableAlias.equalsIgnoreCase(prev.getTabAlias()))) {
+      throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(columnAlias));
+    }
+  }
+
   public ArrayList<ColumnInfo> getColumnInfos() {
     return rowSchema.getSignature();
   }
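
For context, a minimal standalone sketch of how the new helper behaves when a subquery's
select list would expose the same column alias from two different join branches. This is
illustrative only; the ColumnInfo constructor arguments and the put() call below are
simplified assumptions, not part of the patch:

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CheckColumnSketch {
  public static void main(String[] args) throws SemanticException {
    // Output resolver for a subquery select list; branch "a" has already
    // contributed a column exposed under the alias "key".
    RowResolver out = new RowResolver();
    out.put("a", "key",
        new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "a", false));

    out.checkColumn("a", "key");  // same table alias: no exception
    out.checkColumn("b", "key");  // different table alias: throws
                                  // SemanticException [Error 10007]:
                                  // Ambiguous column reference key
  }
}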

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1417743&r1=1417742&r2=1417743&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Dec  6 06:16:51 2012
@@ -1588,7 +1588,7 @@ public class SemanticAnalyzer extends Ba
   @SuppressWarnings("nls")
   private Integer genColListRegex(String colRegex, String tabAlias,
       ASTNode sel, ArrayList<ExprNodeDesc> col_list,
-      RowResolver input, Integer pos, RowResolver output, List<String> aliases)
+      RowResolver input, Integer pos, RowResolver output, List<String> aliases, boolean subQuery)
       throws SemanticException {
 
     // The table alias should exist
@@ -1642,6 +1642,9 @@ public class SemanticAnalyzer extends Ba
 
         ExprNodeColumnDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
             name, colInfo.getTabAlias(), colInfo.getIsVirtualCol(), colInfo.isSkewedCol());
+        if (subQuery) {
+          output.checkColumn(tmp[0], tmp[1]);
+        }
         col_list.add(expr);
         output.put(tmp[0], tmp[1],
             new ColumnInfo(getColumnInternalName(pos), colInfo.getType(),
@@ -2231,7 +2234,6 @@ public class SemanticAnalyzer extends Ba
     ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();
     RowResolver out_rwsch = new RowResolver();
     ASTNode trfm = null;
-    String alias = qb.getParseInfo().getAlias();
     Integer pos = Integer.valueOf(0);
     RowResolver inputRR = opParseCtx.get(input).getRowResolver();
     // SELECT * or SELECT TRANSFORM(*)
@@ -2378,10 +2380,11 @@ public class SemanticAnalyzer extends Ba
 
       }
 
+      boolean subQuery = qb.getParseInfo().getIsSubQ();
       if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
         pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
             : getUnescapedName((ASTNode)expr.getChild(0)).toLowerCase(),
-            expr, col_list, inputRR, pos, out_rwsch, qb.getAliases());
+            expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
         selectStar = true;
       } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
           && !inputRR.getIsExprResolver()
@@ -2390,7 +2393,7 @@ public class SemanticAnalyzer extends Ba
         // This can only happen without AS clause
         // We don't allow this for ExprResolver - the Group By case
         pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
-            null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases());
+            null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
       } else if (expr.getType() == HiveParser.DOT
           && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
           && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
@@ -2403,7 +2406,7 @@ public class SemanticAnalyzer extends Ba
         pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
             unescapeIdentifier(expr.getChild(0).getChild(0).getText()
             .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
-            qb.getAliases());
+            qb.getAliases(), subQuery);
       } else {
         // Case when this is an expression
         TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
@@ -2411,9 +2414,8 @@ public class SemanticAnalyzer extends Ba
         tcCtx.setAllowStatefulFunctions(true);
         ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx);
         col_list.add(exp);
-        if (!StringUtils.isEmpty(alias)
-            && (out_rwsch.get(null, colAlias) != null)) {
-          throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(colAlias));
+        if (subQuery) {
+          out_rwsch.checkColumn(tabAlias, colAlias);
         }
 
         ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
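
As a rough mental model of the change above (illustrative only, not Hive code): with the
subQuery flag, the ambiguity check is applied to star and regex expansions as well as plain
expressions, and a column alias is rejected as soon as it would be exposed by two different
table aliases within one subquery select list. A minimal standalone sketch, assuming a toy
map from column alias to table alias:

import java.util.HashMap;
import java.util.Map;

// Standalone illustration of the rule enforced for subquery select lists:
// a column alias may be exposed more than once only if every occurrence
// comes from the same table alias.
public class AmbiguityRuleSketch {
  private final Map<String, String> exposed = new HashMap<String, String>();

  boolean wouldBeAmbiguous(String tableAlias, String columnAlias) {
    String prev = exposed.get(columnAlias);
    if (prev != null
        && (tableAlias == null || !tableAlias.equalsIgnoreCase(prev))) {
      return true;  // "Ambiguous column reference <columnAlias>"
    }
    exposed.put(columnAlias, tableAlias);
    return false;
  }

  public static void main(String[] args) {
    AmbiguityRuleSketch r = new AmbiguityRuleSketch();
    // select a.key, a.key ...  -> allowed (clientpositive/ambiguous_col.q)
    System.out.println(r.wouldBeAmbiguous("a", "key"));  // false
    System.out.println(r.wouldBeAmbiguous("a", "key"));  // false
    // both join branches expose "key" -> rejected (clientnegative tests)
    System.out.println(r.wouldBeAmbiguous("b", "key"));  // true
  }
}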

Added: hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q Thu Dec  6 06:16:51 2012
@@ -0,0 +1,2 @@
+-- TOK_ALLCOLREF
+explain select * from (select * from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

Added: hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q Thu Dec  6 06:16:51 2012
@@ -0,0 +1,2 @@
+-- TOK_TABLE_OR_COL
+explain select * from (select `.*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

Added: hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q Thu Dec  6 06:16:51 2012
@@ -0,0 +1,2 @@
+-- DOT
+explain select * from (select a.`[kv].*`, b.`[kv].*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

Added: hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q Thu Dec  6 06:16:51 2012
@@ -0,0 +1,6 @@
+-- TOK_ALLCOLREF
+explain select * from (select a.key, a.* from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;
+-- DOT
+explain select * from (select a.key, a.`[k].*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;
+-- EXPRESSION
+explain select * from (select a.key, a.key from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

Added: hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out Thu Dec  6 06:16:51 2012
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10007]: Ambiguous column reference key

Added: hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out Thu Dec  6 06:16:51 2012
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10007]: Ambiguous column reference key

Added: hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out Thu Dec  6 06:16:51 2012
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10007]: Ambiguous column reference key

Added: hive/trunk/ql/src/test/results/clientnegative/ambiguous_col_patterned.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ambiguous_col_patterned.q.out?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ambiguous_col_patterned.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/ambiguous_col_patterned.q.out Thu Dec  6 06:16:51 2012
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Ambiguous column reference key

Added: hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out?rev=1417743&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out Thu Dec  6 06:16:51 2012
@@ -0,0 +1,263 @@
+PREHOOK: query: -- TOK_ALLCOLREF
+explain select * from (select a.key, a.* from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+PREHOOK: type: QUERY
+POSTHOOK: query: -- TOK_ALLCOLREF
+explain select * from (select a.key, a.* from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))))) t)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t:a:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+        t:b:src1 
+          TableScan
+            alias: src1
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 1
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col1, _col2
+            Select Operator
+              expressions:
+                    expr: _col1
+                    type: string
+                    expr: _col2
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: -- DOT
+explain select * from (select a.key, a.`[k].*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+PREHOOK: type: QUERY
+POSTHOOK: query: -- DOT
+explain select * from (select a.key, a.`[k].*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) `[k].*`))))) t)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t:a:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: _col0
+                      type: string
+        t:b:src1 
+          TableScan
+            alias: src1
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 1
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 
+          handleSkewJoin: false
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col1
+            Select Operator
+              expressions:
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: -- EXPRESSION
+explain select * from (select a.key, a.key from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+PREHOOK: type: QUERY
+POSTHOOK: query: -- EXPRESSION
+explain select * from (select a.key, a.key from (select * from src) a join (select * from src1) b on (a.key = b.key)) t
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))))) t)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t:a:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: _col0
+                      type: string
+        t:b:src1 
+          TableScan
+            alias: src1
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 1
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 
+          handleSkewJoin: false
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col1
+            Select Operator
+              expressions:
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+