Posted to commits@hive.apache.org by ha...@apache.org on 2013/04/11 04:03:34 UTC

svn commit: r1466754 [1/3] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/optimizer/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ java/org/apache/hadoop/hive/ql/udf/ptf/ test/queries/clientpositive/ test/results/c...

Author: hashutosh
Date: Thu Apr 11 02:03:33 2013
New Revision: 1466754

URL: http://svn.apache.org/r1466754
Log:
HIVE-4306 : PTFDeserializer should reconstruct OIs based on InputOI passed to PTFOperator (Harish Butani and Prajakta Kalmegh via Ashutosh Chauhan)

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java
    hive/trunk/ql/src/test/queries/clientpositive/ptf_npath.q
    hive/trunk/ql/src/test/results/clientpositive/ptf.q.out
    hive/trunk/ql/src/test/results/clientpositive/ptf_npath.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Thu Apr 11 02:03:33 2013
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.optimizer;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -68,7 +67,6 @@ import org.apache.hadoop.hive.ql.plan.Ma
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.ShapeDetails;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowExpressionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
@@ -79,8 +77,6 @@ import org.apache.hadoop.hive.ql.plan.Ta
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 /**
  * Factory for generating the different node processors used by ColumnPruner.
@@ -163,16 +159,11 @@ public final class ColumnPrunerProcFacto
 
   /**
    * - Pruning can only be done for Windowing. PTFs are black boxes,
-   * we assume all columns are needed.
+   *   we assume all columns are needed.
    * - add column names referenced in WindowFn args and in WindowFn expressions
-   * to the pruned list of the child Select Op.
-   * - Prune the Column names & types serde properties in each of the Shapes in the PTF Chain:
-   *    - the InputDef's output shape
-   *    - Window Tabl Functions: window output shape & output shape.
-   * - Why is pruning the Column names & types in the serde properties enough?
-   *   - because during runtime we rebuild the OIs using these properties.
+   *   to the pruned list of the child Select Op.
    * - finally we set the prunedColList on the ColumnPrunerContx;
-   * and update the RR & signature on the PTFOp.
+   *   and update the RR & signature on the PTFOp.
    */
   public static class ColumnPrunerPTFProc implements NodeProcessor {
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
@@ -194,10 +185,6 @@ public final class ColumnPrunerProcFacto
       //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
       prunedCols = new ArrayList<String>(prunedCols);
       prunedColumnsList(prunedCols, def);
-      setSerdePropsOfShape(def.getInput().getOutputShape(), prunedCols);
-      setSerdePropsOfShape(def.getOutputFromWdwFnProcessing(), prunedCols);
-      setSerdePropsOfShape(def.getOutputShape(), prunedCols);
-
       RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
       RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
       cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
@@ -255,47 +242,6 @@ public final class ColumnPrunerProcFacto
        }
     }
 
-    private List<String> getLowerCasePrunedCols(List<String> prunedCols){
-      List<String> lowerCasePrunedCols = new ArrayList<String>();
-      for (String col : prunedCols) {
-        lowerCasePrunedCols.add(col.toLowerCase());
-      }
-      return lowerCasePrunedCols;
-    }
-
-    /*
-     * reconstruct Column names & types list based on the prunedCols list.
-     */
-    private void setSerdePropsOfShape(ShapeDetails shp, List<String> prunedCols) {
-      List<String> columnNames = Arrays.asList(shp.getSerdeProps().get(
-          org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS).split(","));
-      List<TypeInfo> columnTypes = TypeInfoUtils
-          .getTypeInfosFromTypeString(shp.getSerdeProps().get(
-              org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
-      /*
-       * fieldNames in OI are lower-cased. So we compare lower cased names for now.
-       */
-      prunedCols = getLowerCasePrunedCols(prunedCols);
-
-      StringBuilder cNames = new StringBuilder();
-      StringBuilder cTypes = new StringBuilder();
-
-      boolean addComma = false;
-      for(int i=0; i < columnNames.size(); i++) {
-        if ( prunedCols.contains(columnNames.get(i)) ) {
-          cNames.append(addComma ? "," : "");
-          cTypes.append(addComma ? "," : "");
-          cNames.append(columnNames.get(i));
-          cTypes.append(columnTypes.get(i));
-          addComma = true;
-        }
-      }
-      shp.getSerdeProps().put(
-          org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, cNames.toString());
-      shp.getSerdeProps().put(
-          org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES, cTypes.toString());
-    }
-
     /*
      * from the prunedCols list filter out columns that refer to WindowFns or WindowExprs
      * the returned list is set as the prunedList needed by the PTFOp.

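For reference, a minimal sketch (not part of this patch) of why the removed setSerdePropsOfShape machinery is no longer needed: once shapes are rebuilt from the ObjectInspector actually passed to the PTFOperator (see the PTFDeserializer changes below), a pruned input simply arrives as a narrower struct OI, so there is no comma-separated name/type property string to rewrite by hand. The column names here are invented:

    import java.util.Arrays;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class PrunedShapeSketch {
      public static void main(String[] args) {
        // After column pruning, the operator's input OI already contains only the
        // surviving columns; no serde-property surgery is required.
        StructObjectInspector prunedOI =
            ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("fl_num", "arr_delay"), // "origin_city_name" was pruned
                Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector));
        System.out.println(prunedOI.getTypeName()); // struct<fl_num:string,arr_delay:int>
      }
    }
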
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Thu Apr 11 02:03:33 2013
@@ -909,7 +909,7 @@ public class PTFTranslator {
   }
 
   @SuppressWarnings({"unchecked"})
-  private static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
+  public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
       Map<String,String> serdePropsMap) {
 
     if ( serdePropsMap == null ) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Apr 11 02:03:33 2013
@@ -10546,34 +10546,32 @@ public class SemanticAnalyzer extends Ba
         new LinkedHashMap<String[], ColumnInfo>();
     pos = 0;
     for (ColumnInfo colInfo : colInfoList) {
-      if (!colInfo.isHiddenVirtualCol()) {
-        String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
+      String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
+      /*
+       * if we have already encountered this colInfo internalName.
+       * We encounter it again because it must be put for the Having clause.
+       * We will add these entries in the end; in a loop on colsAddedByHaving. See below.
+       */
+      if ( colsAddedByHaving.containsKey(alias)) {
+        continue;
+      }
+      ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
+      ColumnInfo eColInfo = new ColumnInfo(
+          SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
+          colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
+
+      if ( astNode == null ) {
+        extractRR.put(alias[0], alias[1], eColInfo);
+      }
+      else {
         /*
-         * if we have already encountered this colInfo internalName.
-         * We encounter it again because it must be put for the Having clause.
-         * We will add these entries in the end; in a loop on colsAddedByHaving. See below.
+         * in case having clause refers to this column may have been added twice;
+         * once with the ASTNode.toStringTree as the alias
+         * and then with the real alias.
          */
-        if ( colsAddedByHaving.containsKey(alias)) {
-          continue;
-        }
-        ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
-        ColumnInfo eColInfo = new ColumnInfo(
-            SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
-            colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
-
-        if ( astNode == null ) {
-          extractRR.put(alias[0], alias[1], eColInfo);
-        }
-        else {
-          /*
-           * in case having clause refers to this column may have been added twice;
-           * once with the ASTNode.toStringTree as the alias
-           * and then with the real alias.
-           */
-          extractRR.putExpression(astNode, eColInfo);
-          if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
-            colsAddedByHaving.put(alias, eColInfo);
-          }
+        extractRR.putExpression(astNode, eColInfo);
+        if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
+          colsAddedByHaving.put(alias, eColInfo);
         }
       }
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Thu Apr 11 02:03:33 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,8 +55,12 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 
+@SuppressWarnings("deprecation")
 public class PTFDeserializer {
 
   PTFDesc ptfDesc;
@@ -83,7 +88,7 @@ public class PTFDeserializer {
     while ( !ptfChain.isEmpty() ) {
       currentDef = ptfChain.pop();
       if ( currentDef instanceof PTFQueryInputDef) {
-        initialize((PTFQueryInputDef)currentDef);
+        initialize((PTFQueryInputDef)currentDef, inputOI);
       }
       else if ( currentDef instanceof WindowTableFunctionDef) {
         initializeWindowing((WindowTableFunctionDef)currentDef);
@@ -101,8 +106,6 @@ public class PTFDeserializer {
      * 1. setup resolve, make connections
      */
     TableFunctionEvaluator tEval = def.getTFunction();
-    /*WindowingTableFunctionResolver tResolver = (WindowingTableFunctionResolver)
-        FunctionRegistry.getTableFunctionResolver(def.getName());*/
     WindowingTableFunctionResolver tResolver =
         (WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
     tResolver.initialize(ptfDesc, def, tEval);
@@ -141,7 +144,7 @@ public class PTFDeserializer {
       StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
           aliases, fieldOIs);
       tResolver.setWdwProcessingOutputOI(wdwOutOI);
-      initialize(def.getOutputFromWdwFnProcessing());
+      initialize(def.getOutputFromWdwFnProcessing(), wdwOutOI);
     }
     else {
       def.setOutputFromWdwFnProcessing(inpShape);
@@ -161,8 +164,8 @@ public class PTFDeserializer {
     /*
      * 4. give Evaluator chance to setup for Output execution; setup Output shape.
      */
-    initialize(def.getOutputShape());
     tResolver.initializeOutputOI();
+    initialize(def.getOutputShape(), tEval.getOutputOI());
 
     /*
      * If we have windowExpressions then we convert to Std. Object to process;
@@ -175,9 +178,9 @@ public class PTFDeserializer {
     }
   }
 
-  protected void initialize(PTFQueryInputDef def) throws HiveException {
+  protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {
     ShapeDetails outShape = def.getOutputShape();
-    initialize(outShape);
+    initialize(outShape, OI);
   }
 
   protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
@@ -206,7 +209,7 @@ public class PTFDeserializer {
     if (tEval.isTransformsRawInput())
     {
       tResolver.initializeRawInputOI();
-      initialize(def.getRawInputShape());
+      initialize(def.getRawInputShape(), tEval.getRawInputOI());
     }
     else {
       def.setRawInputShape(inpShape);
@@ -218,7 +221,7 @@ public class PTFDeserializer {
      * 4. give Evaluator chance to setup for Output execution; setup Output shape.
      */
     tResolver.initializeOutputOI();
-    initialize(def.getOutputShape());
+    initialize(def.getOutputShape(), tEval.getOutputOI());
   }
 
   static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException
@@ -286,10 +289,11 @@ public class PTFDeserializer {
     return outOI;
   }
 
-  protected void initialize(ShapeDetails shp) throws HiveException {
+  protected void initialize(ShapeDetails shp, StructObjectInspector OI) throws HiveException {
     String serdeClassName = shp.getSerdeClassName();
     Properties serDeProps = new Properties();
-    Map<String, String> serdePropsMap = shp.getSerdeProps();
+    Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
+    addOIPropertiestoSerDePropsMap(OI, serdePropsMap);
     for (String serdeName : serdePropsMap.keySet()) {
       serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
     }
@@ -328,4 +332,43 @@ public class PTFDeserializer {
     }
   }
 
+  @SuppressWarnings({"unchecked"})
+  public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
+      Map<String,String> serdePropsMap) {
+
+    if ( serdePropsMap == null ) {
+      return;
+    }
+
+    ArrayList<? extends Object>[] tInfo = getTypeMap(OI);
+
+    ArrayList<String> columnNames = (ArrayList<String>) tInfo[0];
+    ArrayList<TypeInfo> fields = (ArrayList<TypeInfo>) tInfo[1];
+    StringBuilder cNames = new StringBuilder();
+    StringBuilder cTypes = new StringBuilder();
+
+    for (int i = 0; i < fields.size(); i++)
+    {
+      cNames.append(i > 0 ? "," : "");
+      cTypes.append(i > 0 ? "," : "");
+      cNames.append(columnNames.get(i));
+      cTypes.append(fields.get(i).getTypeName());
+    }
+
+    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
+        cNames.toString());
+    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
+        cTypes.toString());
+  }
+
+  private static ArrayList<? extends Object>[] getTypeMap(
+      StructObjectInspector oi) {
+    StructTypeInfo t = (StructTypeInfo) TypeInfoUtils
+        .getTypeInfoFromObjectInspector(oi);
+    ArrayList<String> fnames = t.getAllStructFieldNames();
+    ArrayList<TypeInfo> fields = t.getAllStructFieldTypeInfos();
+    return new ArrayList<?>[]
+    { fnames, fields };
+  }
+
 }

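For reference, a minimal usage sketch (not part of this patch) of the addOIPropertiestoSerDePropsMap helper that this change adds to PTFDeserializer as a public static method: it derives the columns/columns.types serde properties from a runtime ObjectInspector instead of trusting values serialized into the plan. The demo OI and its column names are invented:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class OiToSerDePropsDemo {
      public static void main(String[] args) {
        // A struct OI standing in for the OI handed to the PTFOperator at runtime.
        StructObjectInspector oi =
            ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("fl_num", "arr_delay"),
                Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector));

        // Rebuild the serde properties from the OI itself.
        Map<String, String> props = new LinkedHashMap<String, String>();
        PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, props);

        System.out.println(props.get(serdeConstants.LIST_COLUMNS));      // fl_num,arr_delay
        System.out.println(props.get(serdeConstants.LIST_COLUMN_TYPES)); // string,int
      }
    }
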
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java Thu Apr 11 02:03:33 2013
@@ -20,16 +20,14 @@ package org.apache.hadoop.hive.ql.udf.pt
 
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Map;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
-import org.apache.hadoop.hive.ql.exec.PTFUtils;
-import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.PTFTranslator;
@@ -56,7 +54,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 /**
@@ -90,8 +87,7 @@ public class NPath extends TableFunction
   /*
    * the names of the Columns of the input to NPath. Used to setup the tpath Struct column.
    */
-  private ArrayList<String> inputColumnNames;
-  private ArrayList<String> selectListNames;
+  private HashMap<String,String> inputColumnNamesMap;
 
   @Override
   public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException
@@ -124,20 +120,12 @@ public class NPath extends TableFunction
         message));
   }
 
-  public ArrayList<String> getInputColumnNames() {
-    return inputColumnNames;
-  }
-
-  public void setInputColumnNames(ArrayList<String> inputColumnNames) {
-    this.inputColumnNames = inputColumnNames;
+  public HashMap<String,String> getInputColumnNames() {
+    return inputColumnNamesMap;
   }
 
-  public ArrayList<String> getSelectListNames() {
-    return selectListNames;
-  }
-
-  public void setSelectListNames(ArrayList<String> selectListNames) {
-    this.selectListNames = selectListNames;
+  public void setInputColumnNames(HashMap<String,String> inputColumnNamesMap) {
+    this.inputColumnNamesMap = inputColumnNamesMap;
   }
 
   public static class NPathResolver extends TableFunctionResolver
@@ -204,8 +192,6 @@ public class NPath extends TableFunction
       }
       evaluator.resultExprInfo = resultExprParser.getResultExprInfo();
       StructObjectInspector OI = evaluator.resultExprInfo.resultOI;
-      evaluator.selectListNames = new ArrayList<String>();
-      extractOIColumnNames(resultExprParser.selectListInputOI, evaluator.selectListNames);
 
       setOutputOI(OI);
     }
@@ -358,15 +344,6 @@ public class NPath extends TableFunction
       return evaluator.resultExprInfo.getResultExprNames();
     }
 
-
-
-    private static void extractOIColumnNames(StructObjectInspector OI,
-        ArrayList<String> oiColumnNames) {
-      StructTypeInfo t = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(OI);
-      ArrayList<String> fnames = t.getAllStructFieldNames();
-      oiColumnNames.addAll(fnames);
-    }
-
   }
 
   public ResultExprInfo getResultExprInfo() {
@@ -842,11 +819,9 @@ public class NPath extends TableFunction
       PTFInputDef inpDef) throws SemanticException {
     RowResolver rr = new RowResolver();
     RowResolver inputRR = inpDef.getOutputShape().getRr();
-    boolean inputColNamesKnown = evaluator.inputColumnNames != null;
 
-    if ( !inputColNamesKnown ) {
-      evaluator.inputColumnNames = new ArrayList<String>();
-    }
+    evaluator.inputColumnNamesMap = new HashMap<String,String>();
+    ArrayList<String> inputColumnNames = new ArrayList<String>();
 
     ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();
 
@@ -862,21 +837,21 @@ public class NPath extends TableFunction
       inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
       if ( inExpr != null ) {
         rr.putExpression(inExpr, cInfo);
+        colAlias = inExpr.toStringTree().toLowerCase();
       }
       else {
         colAlias = colAlias == null ? cInfo.getInternalName() : colAlias;
         rr.put(cInfo.getTabAlias(), colAlias, cInfo);
       }
 
-      if ( !inputColNamesKnown ) {
-        evaluator.inputColumnNames.add(colAlias);
-      }
+      evaluator.inputColumnNamesMap.put(cInfo.getInternalName(), colAlias);
+      inputColumnNames.add(colAlias);
       inpColOIs.add(cInfo.getObjectInspector());
     }
 
     StandardListObjectInspector pathAttrOI =
         ObjectInspectorFactory.getStandardListObjectInspector(
-        ObjectInspectorFactory.getStandardStructObjectInspector(evaluator.inputColumnNames,
+        ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames,
             inpColOIs));
 
     ColumnInfo pathColumn = new ColumnInfo(PATHATTR_NAME,
@@ -890,21 +865,29 @@ public class NPath extends TableFunction
 
   protected static StructObjectInspector createSelectListOI(NPath evaluator, PTFInputDef inpDef) {
     StructObjectInspector inOI = inpDef.getOutputShape().getOI();
+    ArrayList<String> inputColumnNames = new ArrayList<String>();
+    ArrayList<String> selectListNames = new ArrayList<String>();
     ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
     for(StructField f : inOI.getAllStructFieldRefs()) {
-      fieldOIs.add(f.getFieldObjectInspector());
+      String inputColName = evaluator.inputColumnNamesMap.get(f.getFieldName());
+      if ( inputColName != null ) {
+        inputColumnNames.add(inputColName);
+        selectListNames.add(f.getFieldName());
+        fieldOIs.add(f.getFieldObjectInspector());
+      }
     }
 
     StandardListObjectInspector pathAttrOI =
         ObjectInspectorFactory.getStandardListObjectInspector(
-        ObjectInspectorFactory.getStandardStructObjectInspector(evaluator.inputColumnNames,
+        ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames,
             fieldOIs));
 
     ArrayList<ObjectInspector> selectFieldOIs = new ArrayList<ObjectInspector>();
     selectFieldOIs.addAll(fieldOIs);
     selectFieldOIs.add(pathAttrOI);
+    selectListNames.add(NPath.PATHATTR_NAME);
     return ObjectInspectorFactory.getStandardStructObjectInspector(
-        evaluator.selectListNames, selectFieldOIs);
+        selectListNames, selectFieldOIs);
   }
 
   public static Object getSelectListInput(Object currRow, ObjectInspector rowOI,

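In the NPath changes above, the two positional lists (inputColumnNames, selectListNames) are replaced by a single map from internal column name to alias, so createSelectListOI can intersect the fields of the (possibly pruned) input OI with the columns NPath recorded during translation. A standalone illustration of that filtering idea, with invented internal names and aliases:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class NPathColumnFilterSketch {
      public static void main(String[] args) {
        // internal name -> user-visible alias, recorded while building the path OI
        Map<String, String> inputColumnNamesMap = new HashMap<String, String>();
        inputColumnNamesMap.put("_col0", "origin_city_name");
        inputColumnNamesMap.put("_col2", "fl_num");

        // field names of the runtime input OI; "_col1" was pruned away upstream
        List<String> inOIFields = Arrays.asList("_col0", "_col2");

        List<String> inputColumnNames = new ArrayList<String>();
        List<String> selectListNames = new ArrayList<String>();
        for (String f : inOIFields) {
          String alias = inputColumnNamesMap.get(f);
          if (alias != null) { // keep only columns NPath knows about
            inputColumnNames.add(alias);
            selectListNames.add(f);
          }
        }
        System.out.println(inputColumnNames); // [origin_city_name, fl_num]
        System.out.println(selectListNames);  // [_col0, _col2]
      }
    }
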
Modified: hive/trunk/ql/src/test/queries/clientpositive/ptf_npath.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ptf_npath.q?rev=1466754&r1=1466753&r2=1466754&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ptf_npath.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/ptf_npath.q Thu Apr 11 02:03:33 2013
@@ -20,7 +20,7 @@ from npath(on 
         sort by year, month, day_of_month  
       arg1('LATE.LATE+'), 
       arg2('LATE'), arg3(arr_delay > 15), 
-    arg4('origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath as tpath') 
+    arg4('origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath') 
    );       
 
 -- 2. Npath on 1 partition
@@ -30,7 +30,7 @@ from npath(on 
         sort by year, month, day_of_month  
       arg1('LATE.LATE+'), 
       arg2('LATE'), arg3(arr_delay > 15), 
-    arg4('origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath as tpath') 
+    arg4('origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath') 
    )
 where fl_num = 1142;       
    
\ No newline at end of file