You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ec...@apache.org on 2013/06/19 03:02:45 UTC

svn commit: r1494408 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ java/org/apache/hadoop/hive/ql/udf/generic/ test/queries/clientpositive/ test/results/clientpositive/

Author: ecapriolo
Date: Wed Jun 19 01:02:45 2013
New Revision: 1494408

URL: http://svn.apache.org/r1494408
Log:
HIVE-4616: Support outer lateral view


Submitted by:	Navis	
Reviewed by:	Edward Capriolo
Approved by:	Edward Capriolo

Added:
    hive/trunk/ql/src/test/queries/clientpositive/lateral_view_outer.q
    hive/trunk/ql/src/test/results/clientpositive/lateral_view_outer.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java Wed Jun 19 01:02:45 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -28,6 +29,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.UDTFDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.generic.UDTFCollector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
@@ -45,7 +47,10 @@ public class UDTFOperator extends Operat
 
   ObjectInspector[] udtfInputOIs = null;
   Object[] objToSendToUDTF = null;
-  Object[] forwardObj = new Object[1];
+
+  GenericUDTF genericUDTF;
+  UDTFCollector collector;
+  List outerObj;
 
   /**
    * sends periodic reports back to the tracker.
@@ -54,7 +59,10 @@ public class UDTFOperator extends Operat
 
   @Override
   protected void initializeOp(Configuration hconf) throws HiveException {
-    conf.getGenericUDTF().setCollector(new UDTFCollector(this));
+    genericUDTF = conf.getGenericUDTF();
+    collector = new UDTFCollector(this);
+
+    genericUDTF.setCollector(collector);
 
     // Make an object inspector [] of the arguments to the UDTF
     List<? extends StructField> inputFields =
@@ -68,10 +76,13 @@ public class UDTFOperator extends Operat
 
     MapredContext context = MapredContext.get();
     if (context != null) {
-      context.setup(conf.getGenericUDTF());
+      context.setup(genericUDTF);
     }
-    StructObjectInspector udtfOutputOI = conf.getGenericUDTF().initialize(
+    StructObjectInspector udtfOutputOI = genericUDTF.initialize(
         udtfInputOIs);
+    if (conf.isOuterLV()) {
+      outerObj = Arrays.asList(new Object[udtfOutputOI.getAllStructFieldRefs().size()]);
+    }
 
     // Since we're passing the object output by the UDTF directly to the next
     // operator, we can use the same OI.
@@ -100,8 +111,11 @@ public class UDTFOperator extends Operat
       objToSendToUDTF[i] = soi.getStructFieldData(row, fields.get(i));
     }
 
-    conf.getGenericUDTF().process(objToSendToUDTF);
-
+    genericUDTF.process(objToSendToUDTF);
+    if (conf.isOuterLV() && collector.getCounter() == 0) {
+      collector.collect(outerObj);
+    }
+    collector.reset();
   }
 
   /**

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g Wed Jun 19 01:02:45 2013
@@ -125,7 +125,11 @@ lateralView
 @init {gParent.msgs.push("lateral view"); }
 @after {gParent.msgs.pop(); }
 	:
-	KW_LATERAL KW_VIEW function tableAlias KW_AS identifier (COMMA identifier)* -> ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR function identifier+ tableAlias)))
+	KW_LATERAL KW_VIEW KW_OUTER function tableAlias KW_AS identifier (COMMA identifier)*
+	-> ^(TOK_LATERAL_VIEW_OUTER ^(TOK_SELECT ^(TOK_SELEXPR function identifier+ tableAlias)))
+	|
+	KW_LATERAL KW_VIEW function tableAlias KW_AS identifier (COMMA identifier)*
+	-> ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR function identifier+ tableAlias)))
 	;
 
 tableAlias

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Wed Jun 19 01:02:45 2013
@@ -240,6 +240,7 @@ TOK_RECORDREADER;
 TOK_RECORDWRITER;
 TOK_LEFTSEMIJOIN;
 TOK_LATERAL_VIEW;
+TOK_LATERAL_VIEW_OUTER;
 TOK_TABALIAS;
 TOK_ANALYZE;
 TOK_CREATEROLE;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Jun 19 01:02:45 2013
@@ -710,7 +710,8 @@ public class SemanticAnalyzer extends Ba
               "PTF invocation in a Join must have an alias"));
         }
 
-      } else if (child.getToken().getType() == HiveParser.TOK_LATERAL_VIEW) {
+      } else if (child.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
+          child.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
         // SELECT * FROM src1 LATERAL VIEW udtf() AS myTable JOIN src2 ...
         // is not supported. Instead, the lateral view must be in a subquery
         // SELECT * FROM (SELECT * FROM src1 LATERAL VIEW udtf() AS myTable) a
@@ -751,6 +752,7 @@ public class SemanticAnalyzer extends Ba
       alias = processSubQuery(qb, next);
       break;
     case HiveParser.TOK_LATERAL_VIEW:
+    case HiveParser.TOK_LATERAL_VIEW_OUTER:
       alias = processLateralView(qb, next);
       break;
     default:
@@ -849,7 +851,8 @@ public class SemanticAnalyzer extends Ba
           processTable(qb, frm);
         } else if (frm.getToken().getType() == HiveParser.TOK_SUBQUERY) {
           processSubQuery(qb, frm);
-        } else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW) {
+        } else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
+            frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
           processLateralView(qb, frm);
         } else if (isJoinToken(frm)) {
           queryProperties.setHasJoin(true);
@@ -1026,6 +1029,7 @@ public class SemanticAnalyzer extends Ba
         skipRecursion = false;
         break;
       case HiveParser.TOK_LATERAL_VIEW:
+      case HiveParser.TOK_LATERAL_VIEW_OUTER:
         // todo: nested LV
         assert ast.getChildCount() == 1;
         qb.getParseInfo().getDestToLateralView().put(ctx_1.dest, ast);
@@ -2441,7 +2445,7 @@ public class SemanticAnalyzer extends Ba
       throws SemanticException {
     ASTNode selExprList = qb.getParseInfo().getSelForClause(dest);
 
-    Operator<?> op = genSelectPlan(selExprList, qb, input);
+    Operator<?> op = genSelectPlan(selExprList, qb, input, false);
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("Created Select Plan for clause: " + dest);
@@ -2452,7 +2456,7 @@ public class SemanticAnalyzer extends Ba
 
   @SuppressWarnings("nls")
   private Operator<?> genSelectPlan(ASTNode selExprList, QB qb,
-      Operator<?> input) throws SemanticException {
+      Operator<?> input, boolean outerLV) throws SemanticException {
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("tree: " + selExprList.toStringTree());
@@ -2691,7 +2695,7 @@ public class SemanticAnalyzer extends Ba
 
     if (isUDTF) {
       output = genUDTFPlan(genericUDTF, udtfTableAlias, udtfColAliases, qb,
-          output);
+          output, outerLV);
     }
     if (LOG.isDebugEnabled()) {
       LOG.debug("Created Select Plan row schema: " + out_rwsch.toString());
@@ -5474,7 +5478,7 @@ public class SemanticAnalyzer extends Ba
 
   private Operator genUDTFPlan(GenericUDTF genericUDTF,
       String outputTableAlias, ArrayList<String> colAliases, QB qb,
-      Operator input) throws SemanticException {
+      Operator input, boolean outerLV) throws SemanticException {
 
     // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
     QBParseInfo qbp = qb.getParseInfo();
@@ -5552,7 +5556,7 @@ public class SemanticAnalyzer extends Ba
 
     // Add the UDTFOperator to the operator DAG
     Operator<?> udtf = putOpInsertMap(OperatorFactory.getAndMakeChild(
-        new UDTFDesc(genericUDTF), new RowSchema(out_rwsch.getColumnInfos()),
+        new UDTFDesc(genericUDTF, outerLV), new RowSchema(out_rwsch.getColumnInfos()),
         input), out_rwsch);
     return udtf;
   }
@@ -8148,7 +8152,8 @@ public class SemanticAnalyzer extends Ba
     // Get the UDTF Path
     QB blankQb = new QB(null, null, false);
     Operator udtfPath = genSelectPlan((ASTNode) lateralViewTree
-        .getChild(0), blankQb, lvForward);
+        .getChild(0), blankQb, lvForward,
+        lateralViewTree.getType() == HiveParser.TOK_LATERAL_VIEW_OUTER);
     // add udtf aliases to QB
     for (String udtfAlias : blankQb.getAliases()) {
       qb.addAlias(udtfAlias);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java Wed Jun 19 01:02:45 2013
@@ -31,12 +31,14 @@ public class UDTFDesc extends AbstractOp
   private static final long serialVersionUID = 1L;
 
   private GenericUDTF genericUDTF;
+  private boolean outerLV;
 
   public UDTFDesc() {
   }
 
-  public UDTFDesc(final GenericUDTF genericUDTF) {
+  public UDTFDesc(final GenericUDTF genericUDTF, boolean outerLV) {
     this.genericUDTF = genericUDTF;
+    this.outerLV = outerLV;
   }
 
   public GenericUDTF getGenericUDTF() {
@@ -51,4 +53,17 @@ public class UDTFDesc extends AbstractOp
   public String getUDTFName() {
     return genericUDTF.toString();
   }
+
+  public boolean isOuterLV() {
+    return outerLV;
+  }
+
+  public void setOuterLV(boolean outerLV) {
+    this.outerLV = outerLV;
+  }
+
+  @Explain(displayName = "outer lateral view")
+  public String isOuterLateralView() {
+    return outerLV ? "true" : null;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java?rev=1494408&r1=1494407&r2=1494408&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java Wed Jun 19 01:02:45 2013
@@ -33,7 +33,8 @@ public class UDTFCollector implements Co
    * @see
    * org.apache.hadoop.hive.ql.udf.generic.Collector#collect(java.lang.Object)
    */
-  UDTFOperator op = null;
+  final UDTFOperator op;
+  private transient int counter;
 
   public UDTFCollector(UDTFOperator op) {
     this.op = op;
@@ -42,6 +43,14 @@ public class UDTFCollector implements Co
   @Override
   public void collect(Object input) throws HiveException {
     op.forwardUDTFOutput(input);
+    counter++;
   }
 
+  public int getCounter() {
+    return counter;
+  }
+
+  public void reset() {
+    counter = 0;
+  }
 }

Added: hive/trunk/ql/src/test/queries/clientpositive/lateral_view_outer.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/lateral_view_outer.q?rev=1494408&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/lateral_view_outer.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/lateral_view_outer.q Wed Jun 19 01:02:45 2013
@@ -0,0 +1,15 @@
+-- UDTF forwards nothing, OUTER LV add null for that
+explain
+select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10;
+select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10;
+
+-- backward compatible (UDTF forwards something for OUTER LV)
+explain
+select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10;
+select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10;
+
+create table array_valued as select key, if (key > 300, array(value, value), null) as value from src;
+
+explain
+select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10;
+select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10;

Added: hive/trunk/ql/src/test/results/clientpositive/lateral_view_outer.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/lateral_view_outer.q.out?rev=1494408&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/lateral_view_outer.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/lateral_view_outer.q.out Wed Jun 19 01:02:45 2013
@@ -0,0 +1,299 @@
+PREHOOK: query: -- UDTF forwards nothing, OUTER LV add null for that
+explain
+select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: -- UDTF forwards nothing, OUTER LV add null for that
+explain
+select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_LATERAL_VIEW_OUTER (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array)) a (TOK_TABALIAS C))) (TOK_TABREF (TOK_TABNAME src)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Lateral View Forward
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: key, value
+                Lateral View Join Operator
+                  outputColumnNames: _col0, _col1, _col4
+                  Select Operator
+                    expressions:
+                          expr: _col0
+                          type: string
+                          expr: _col1
+                          type: string
+                          expr: _col4
+                          type: string
+                    outputColumnNames: _col0, _col1, _col2
+                    Limit
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              Select Operator
+                expressions:
+                      expr: array()
+                      type: array<string>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  outer lateral view: true
+                  Lateral View Join Operator
+                    outputColumnNames: _col0, _col1, _col4
+                    Select Operator
+                      expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                            expr: _col4
+                            type: string
+                      outputColumnNames: _col0, _col1, _col2
+                      Limit
+                        File Output Operator
+                          compressed: false
+                          GlobalTableId: 0
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+
+PREHOOK: query: select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src LATERAL VIEW OUTER explode(array()) C AS a limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+238	val_238	NULL
+86	val_86	NULL
+311	val_311	NULL
+27	val_27	NULL
+165	val_165	NULL
+409	val_409	NULL
+255	val_255	NULL
+278	val_278	NULL
+98	val_98	NULL
+484	val_484	NULL
+PREHOOK: query: -- backward compatible (UDTF forwards something for OUTER LV)
+explain
+select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: -- backward compatible (UDTF forwards something for OUTER LV)
+explain
+select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_LATERAL_VIEW_OUTER (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 4 5)) a (TOK_TABALIAS C))) (TOK_TABREF (TOK_TABNAME src)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Lateral View Forward
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: key, value
+                Lateral View Join Operator
+                  outputColumnNames: _col0, _col1, _col4
+                  Select Operator
+                    expressions:
+                          expr: _col0
+                          type: string
+                          expr: _col1
+                          type: string
+                          expr: _col4
+                          type: int
+                    outputColumnNames: _col0, _col1, _col2
+                    Limit
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              Select Operator
+                expressions:
+                      expr: array(4,5)
+                      type: array<int>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  outer lateral view: true
+                  Lateral View Join Operator
+                    outputColumnNames: _col0, _col1, _col4
+                    Select Operator
+                      expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                            expr: _col4
+                            type: int
+                      outputColumnNames: _col0, _col1, _col2
+                      Limit
+                        File Output Operator
+                          compressed: false
+                          GlobalTableId: 0
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+
+PREHOOK: query: select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src LATERAL VIEW OUTER explode(array(4,5)) C AS a limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+238	val_238	4
+238	val_238	5
+86	val_86	4
+86	val_86	5
+311	val_311	4
+311	val_311	5
+27	val_27	4
+27	val_27	5
+165	val_165	4
+165	val_165	5
+PREHOOK: query: create table array_valued as select key, if (key > 300, array(value, value), null) as value from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table array_valued as select key, if (key > 300, array(value, value), null) as value from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@array_valued
+PREHOOK: query: explain
+select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_LATERAL_VIEW_OUTER (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_TABLE_OR_COL value)) a (TOK_TABALIAS C))) (TOK_TABREF (TOK_TABNAME array_valued)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        array_valued 
+          TableScan
+            alias: array_valued
+            Lateral View Forward
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: array<string>
+                outputColumnNames: key, value
+                Lateral View Join Operator
+                  outputColumnNames: _col0, _col1, _col4
+                  Select Operator
+                    expressions:
+                          expr: _col0
+                          type: string
+                          expr: _col1
+                          type: array<string>
+                          expr: _col4
+                          type: string
+                    outputColumnNames: _col0, _col1, _col2
+                    Limit
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              Select Operator
+                expressions:
+                      expr: value
+                      type: array<string>
+                outputColumnNames: _col0
+                UDTF Operator
+                  function name: explode
+                  outer lateral view: true
+                  Lateral View Join Operator
+                    outputColumnNames: _col0, _col1, _col4
+                    Select Operator
+                      expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: array<string>
+                            expr: _col4
+                            type: string
+                      outputColumnNames: _col0, _col1, _col2
+                      Limit
+                        File Output Operator
+                          compressed: false
+                          GlobalTableId: 0
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+
+PREHOOK: query: select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@array_valued
+#### A masked pattern was here ####
+POSTHOOK: query: select * from array_valued LATERAL VIEW OUTER explode(value) C AS a limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@array_valued
+#### A masked pattern was here ####
+238	NULL	NULL
+86	NULL	NULL
+311	["val_311","val_311"]	val_311
+311	["val_311","val_311"]	val_311
+27	NULL	NULL
+165	NULL	NULL
+409	["val_409","val_409"]	val_409
+409	["val_409","val_409"]	val_409
+255	NULL	NULL
+278	NULL	NULL