You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/03/29 07:11:36 UTC

svn commit: r1462414 - in /hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql: exec/ optimizer/ parse/ plan/

Author: hashutosh
Date: Fri Mar 29 06:11:36 2013
New Revision: 1462414

URL: http://svn.apache.org/r1462414
Log:
HIVE-4254 [jira] Code cleanup : debug methods, having clause associated with Windowing
(Harish Butani via Ashutosh Chauhan)

Summary:
Clean up code:

remove debug functions in SemanticAnalyzer
remove code dealing with having clause associated with Windowing

Test Plan: EMPTY

Reviewers: JIRA, ashutoshc

Reviewed By: ashutoshc

Differential Revision: https://reviews.facebook.net/D9795

Modified:
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingComponentizer.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java Fri Mar 29 06:11:36 2013
@@ -42,13 +42,10 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
 public class PTFOperator extends Operator<PTFDesc> implements Serializable
 {
@@ -330,19 +327,11 @@ public class PTFOperator extends Operato
     int numWdwExprs = wdwExprs == null ? 0 : wdwExprs.size();
     Object[] output = new Object[numCols];
 
-    PTFExpressionDef havingExpr = wTFnDef.getHavingExpression();
-    boolean applyHaving = havingExpr != null;
-    Converter hvgConverter = !applyHaving ? null
-        : ObjectInspectorConverters
-            .getConverter(
-                havingExpr.getOI(),
-                PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
-    ExprNodeEvaluator havingCondEval = !applyHaving ? null : havingExpr.getExprEvaluator();
     /*
      * If this Windowing invocation has no Window Expressions and doesn't need to be filtered,
      * we can just forward the row in the oPart partition.
      */
-    boolean forwardRowsUntouched = !applyHaving && (wdwExprs == null || wdwExprs.size() == 0 );
+    boolean forwardRowsUntouched = (wdwExprs == null || wdwExprs.size() == 0 );
 
     PTFPartitionIterator<Object> pItr = oPart.iterator();
     PTFOperator.connectLeadLagFunctionsToPartition(conf, pItr);
@@ -360,17 +349,6 @@ public class PTFOperator extends Operato
         continue;
       }
 
-      if (applyHaving)
-      {
-        Object hvgCond = null;
-        hvgCond = havingCondEval.evaluate(oRow);
-        hvgCond = hvgConverter.convert(hvgCond);
-        if (!((Boolean) hvgCond).booleanValue())
-        {
-          continue;
-        }
-      }
-
       /*
        * Setup the output row columns in the following order
        * - the columns in the SelectList processed by the PTF

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Fri Mar 29 06:11:36 2013
@@ -253,10 +253,6 @@ public final class ColumnPrunerProcFacto
            Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
          }
        }
-       if(tDef.getHavingExpression() != null){
-         ExprNodeDesc exprNode = tDef.getHavingExpression().getExprNode();
-         Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
-       }
     }
 
     private List<String> getLowerCasePrunedCols(List<String> prunedCols){

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Fri Mar 29 06:11:36 2013
@@ -261,27 +261,13 @@ public class PTFTranslator {
       wdwTFnDef.setOutputShape(copyShape(wdwOutShape));
     }
 
-    /*
-     * translate having
-     */
-    if ( wdwSpec.getFilterExpr() != null ) {
-      PTFExpressionDef hvEDef;
-      try {
-        hvEDef = buildExpressionDef(wdwOutShape, wdwSpec.getFilterExpr());
-        wdwTFnDef.setHavingExpression(hvEDef);
-      }
-      catch(HiveException he) {
-        throw new SemanticException(he);
-      }
-    }
-
     tFn.setupOutputOI();
 
     /*
-     * If we have windowExpressions or having then we convert to Std. Object to process;
+     * If we have windowExpressions then we convert to Std. Object to process;
      * we just stream these rows; no need to put in an output Partition.
      */
-    if ( windowExpressions.size() > 0 || wdwSpec.getFilterExpr() != null ) {
+    if ( windowExpressions.size() > 0 ) {
       StructObjectInspector oi = (StructObjectInspector)
           ObjectInspectorUtils.getStandardObjectInspector(wdwTFnDef.getOutputShape().getOI());
       wdwTFnDef.getOutputShape().setOI(oi);

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Mar 29 06:11:36 2013
@@ -29,7 +29,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.Stack;
 import java.util.TreeSet;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
@@ -71,7 +70,6 @@ import org.apache.hadoop.hive.ql.exec.Ma
 import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
-import org.apache.hadoop.hive.ql.exec.PTFOperator;
 import org.apache.hadoop.hive.ql.exec.RecordReader;
 import org.apache.hadoop.hive.ql.exec.RecordWriter;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -177,8 +175,6 @@ import org.apache.hadoop.hive.ql.plan.Op
 import org.apache.hadoop.hive.ql.plan.PTFDesc;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.OrderExpressionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFInputDef;
-import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFQueryInputDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
@@ -195,7 +191,6 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
-import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -8597,8 +8592,6 @@ public class SemanticAnalyzer extends Ba
     // up with later.
     Operator sinkOp = genPlan(qb);
 
-    dumpOperatorChain(sinkOp, null);
-
     resultSchema =
         convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
 
@@ -9949,65 +9942,6 @@ public class SemanticAnalyzer extends Ba
     return false;
   }
 
-
-  private static class CheckLeadLagInSelectExprs implements ContextVisitor
-  {
-    QB qb;
-    String dest;
-    boolean hasLeadLagExprs = false;
-    boolean error = false;
-    String errString;
-
-    private CheckLeadLagInSelectExprs(QB qb, String dest) {
-      this.qb = qb;
-      this.dest = dest;
-    }
-
-    void reset() {
-      hasLeadLagExprs = false;
-      error = false;
-      errString = null;
-    }
-
-    @Override
-    public void visit(Object t, Object parent, int childIndex, Map labels)
-    {
-      error = false; errString = null;
-      ASTNode function = (ASTNode) t;
-      WindowingSpec wSpec = qb.getWindowingSpec(dest);
-
-      HashMap<String, ASTNode> windowingExprs =
-          qb.getParseInfo().getWindowingExprsForClause(dest);
-
-      /*
-       * reference to a Windowing Fns is allowed in Windowing Exprs.
-       */
-      if ( windowingExprs != null && windowingExprs.containsKey(function.toStringTree())) {
-        return;
-      }
-
-      String fnName = function.getChild(0).getText().toLowerCase();
-      if (fnName.equals(FunctionRegistry.LEAD_FUNC_NAME)
-          || fnName.equals(FunctionRegistry.LAG_FUNC_NAME))
-      {
-        hasLeadLagExprs = true;
-      }
-    }
-
-    public boolean hasLeadLagExprs() {
-      return hasLeadLagExprs;
-    }
-
-    protected boolean isError() {
-      return error;
-    }
-
-    protected String getErrString() {
-      return errString;
-    }
-  }
-
-
   /*
    * - Invoked during Phase1 when a TOK_SELECT is encountered.
    * - Select tree form is: ^(TOK_SELECT ^(TOK_SELECTEXPR...) ^(TOK_SELECTEXPR...) ...)
@@ -10620,8 +10554,6 @@ public class SemanticAnalyzer extends Ba
       rr = ptfOpRR;
     }
 
-    dumpOperatorChain(input, null);
-
     return input;
   }
 
@@ -10789,151 +10721,4 @@ public class SemanticAnalyzer extends Ba
     return selSpec;
   }
 
-  // debug methods
-  void dumpOperatorChain(Operator sinkOp, PTFDesc ptfDesc) {
-    Stack<Operator> stack = new Stack<Operator>();
-    Operator op = sinkOp;
-    while(op != null ) {
-      stack.push(op);
-      List<Operator> parentOps =op.getParentOperators();
-      if (parentOps != null ) {
-        op = parentOps.get(0);
-      }
-      else {
-        op = null;
-      }
-    }
-
-    int opNum = 1;
-    StringBuilder buf = new StringBuilder();
-    while(!stack.isEmpty()) {
-      op = stack.pop();
-      buf.append("\n").append(opNum).append(".");
-      buf.append(op.getName());
-      buf.append(" :\n");
-      RowResolver rr = opParseCtx.get(op).getRowResolver();
-      dumpRowResolver(buf, rr);
-      if ( op instanceof PTFOperator && ptfDesc != null ) {
-        /*
-         * 1/21 hb: this is no longer correct; in a chain containing multiple PTFOps,
-         * every PTFOp dump prints the info from the
-         * last PTFDef
-         */
-        dump(buf, ptfDesc);
-      }
-      opNum++;
-    }
-    LOG.info(buf);
-  }
-
-  static void dumpRowResolver(StringBuilder buf, RowResolver rr) {
-    buf.append("RowResolver::\n");
-    buf.append("\tcolumns:[");
-    boolean first = true;
-    for(ColumnInfo cInfo : rr.getRowSchema().getSignature()) {
-      String tabalias = cInfo.getTabAlias();
-      String cname = cInfo.getInternalName();
-      if (!first) {
-        buf.append(", ");
-      } else {
-        first = false;
-      }
-      buf.append(tabalias != null ? tabalias : "<null>");
-      buf.append(".");
-      buf.append(cname);
-    }
-    buf.append("]\n");
-    buf.append("\tAliases:[");
-    for(Map.Entry<String, LinkedHashMap<String, ColumnInfo>> entry : rr.getRslvMap().entrySet() ) {
-      String tabalias = entry.getKey();
-      buf.append("\n\t\t");
-      buf.append(tabalias != null ? tabalias : "<null>");
-      buf.append(":[");
-      LinkedHashMap<String, ColumnInfo> colAliases = entry.getValue();
-      first = true;
-      for(Map.Entry<String, ColumnInfo> column: colAliases.entrySet()) {
-        if (!first) {
-          buf.append(", ");
-        } else {
-          first = false;
-        }
-        buf.append(column.getKey()).append(" -> ").append(column.getValue().getInternalName());
-      }
-    }
-    buf.append("\n\t]\n");
-    buf.append("\tcolumns mapped to expressions:[");
-    first = true;
-    for(Map.Entry<String, ASTNode> exprs : rr.getExpressionMap().entrySet()) {
-      if (!first) {
-        buf.append(", ");
-      } else {
-        first = false;
-      }
-      buf.append("\n\t\t");
-      buf.append(exprs.getKey());
-      buf.append(" -> ");
-      buf.append(exprs.getValue().toStringTree());
-    }
-    buf.append("\n\t]\n");
-  }
-
-  private static void dump(StringBuilder buf, PTFDesc ptfDesc) {
-    Stack<PTFInputDef> ptfChain = new Stack<PTFInputDef>();
-    PTFInputDef currentDef = ptfDesc.getFuncDef();
-    while(currentDef != null ) {
-      ptfChain.push(currentDef);
-      currentDef = currentDef.getInput();
-    }
-
-    while(!ptfChain.isEmpty() ) {
-      PTFInputDef iDef = ptfChain.pop();
-      if ( iDef instanceof PTFQueryInputDef ) {
-        dump(buf, (PTFQueryInputDef) iDef);
-      }else {
-        dump(buf, (PartitionedTableFunctionDef) iDef);
-      }
-    }
-
-  }
-
-  private static void dump(StringBuilder buf, PartitionedTableFunctionDef tFnDef) {
-    buf.append("\n").append(tFnDef.getName()).append(":");
-    dump(buf, (PTFInputDef)tFnDef);
-    TableFunctionEvaluator tFn = tFnDef.getTFunction();
-
-    if ( tFn.isTransformsRawInput() ) {
-      buf.append("\nEvaluator RawInput ObjectInspector:[");
-      dump(buf, tFn.getRawInputOI());
-      buf.append("]");
-    }
-
-    buf.append("\nEvaluator Output ObjectInspector:[");
-    dump(buf, tFn.getOutputOI());
-    buf.append("]");
-
-  }
-  private static void dump(StringBuilder buf, PTFQueryInputDef htblDef) {
-    buf.append("\n").append(htblDef.getDestination()).append(":");
-    dump(buf, (PTFInputDef)htblDef);
-  }
-
-  private static void dump(StringBuilder buf, PTFInputDef qInDef) {
-    StructObjectInspector OI = (StructObjectInspector) qInDef.getOutputShape().getOI();
-    buf.append("\nDef ObjectInspector:[");
-    dump(buf, OI);
-    buf.append("]\nSerDe:").append(qInDef.getOutputShape().getSerde().getClass().getName());
-  }
-
-  private static void dump(StringBuilder buf, StructObjectInspector OI) {
-    boolean first = true;
-    for(StructField field : OI.getAllStructFieldRefs() ) {
-      if (!first) {
-        buf.append(", ");
-      } else {
-        first = false;
-      }
-      buf.append(field.getFieldName());
-    }
-  }
-
 }

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingComponentizer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingComponentizer.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingComponentizer.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingComponentizer.java Fri Mar 29 06:11:36 2013
@@ -85,12 +85,6 @@ public class WindowingComponentizer {
         PTFTranslator t = new PTFTranslator();
         t.translate(wSpec, semAly, hCfg, inputRR, unparseT);
         groups.remove(entry.getKey());
-        /*
-         * add the filter expression to the last WindowSpec.
-         */
-        if (groups.isEmpty()) {
-          wSpec.setFilterExpr(originalSpec.getFilterExpr());
-        }
         return wSpec;
       } catch (SemanticException se) {
         originalException = se;

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java Fri Mar 29 06:11:36 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.hive.ql.parse.P
  */
 public class WindowingSpec {
   HashMap<String, WindowExpressionSpec> aliasToWdwExpr;
-  ASTNode filterExpr;
   HashMap<String, WindowSpec> windowSpecs;
   ArrayList<WindowExpressionSpec> windowExpressions;
 
@@ -98,14 +97,6 @@ public class WindowingSpec {
     this.aliasToWdwExpr = aliasToWdwExpr;
   }
 
-  public ASTNode getFilterExpr() {
-    return filterExpr;
-  }
-
-  public void setFilterExpr(ASTNode filterExpr) {
-    this.filterExpr = filterExpr;
-  }
-
   public HashMap<String, WindowSpec> getWindowSpecs() {
     return windowSpecs;
   }

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java Fri Mar 29 06:11:36 2013
@@ -215,7 +215,6 @@ public class PTFDesc extends AbstractOpe
   }
 
   public static class WindowTableFunctionDef extends PartitionedTableFunctionDef {
-    PTFExpressionDef havingExpression;
     ArrayList<WindowExpressionDef> windowExpressions;
     ArrayList<WindowFunctionDef> windowFunctions;
     /*
@@ -226,12 +225,6 @@ public class PTFDesc extends AbstractOpe
      */
     ShapeDetails outputFromWdwFnProcessing;
 
-    public PTFExpressionDef getHavingExpression() {
-      return havingExpression;
-    }
-    public void setHavingExpression(PTFExpressionDef havingExpression) {
-      this.havingExpression = havingExpression;
-    }
     public ArrayList<WindowExpressionDef> getWindowExpressions() {
       return windowExpressions;
     }

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1462414&r1=1462413&r2=1462414&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Fri Mar 29 06:11:36 2013
@@ -158,10 +158,6 @@ public class PTFDeserializer {
       }
     }
 
-    if ( def.getHavingExpression() != null ) {
-      initialize(def.getHavingExpression(), inpShape);
-    }
-
     /*
      * 4. give Evaluator chance to setup for Output execution; setup Output shape.
      */
@@ -169,10 +165,10 @@ public class PTFDeserializer {
     tResolver.initializeOutputOI();
 
     /*
-     * If we have windowExpressions or having then we convert to Std. Object to process;
+     * If we have windowExpressions then we convert to Std. Object to process;
      * we just stream these rows; no need to put in an output Partition.
      */
-    if ( def.getWindowExpressions().size() > 0 || def.getHavingExpression() != null ) {
+    if ( def.getWindowExpressions().size() > 0  ) {
       StructObjectInspector oi = (StructObjectInspector)
           ObjectInspectorUtils.getStandardObjectInspector(def.getOutputShape().getOI());
       def.getOutputShape().setOI(oi);