Posted to commits@hive.apache.org by br...@apache.org on 2014/10/30 17:22:48 UTC

svn commit: r1635536 [11/28] - in /hive/branches/spark: ./ accumulo-handler/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hado...

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java Thu Oct 30 16:22:33 2014
@@ -77,7 +77,8 @@ public class HiveOptiqUtil {
 
   public static boolean validateASTForCBO(ASTNode ast) {
     String astTree = ast.toStringTree();
-    String[] tokens = { "TOK_CHARSETLITERAL" };
+    // if any of the following tokens are present in the AST, bail out of CBO
+    String[] tokens = { "TOK_CHARSETLITERAL", "TOK_TABLESPLITSAMPLE" };
     for (String token : tokens) {
       if (astTree.contains(token)) {
         return false;
@@ -241,7 +242,7 @@ public class HiveOptiqUtil {
    * <p>
    * JoinPredicateInfo:<br>
   * 1. preserves the order of conjunctive elements for
-   * equi-join(m_equiJoinPredicateElements)<br>
+   * equi-join(equiJoinPredicateElements)<br>
   * 2. Stores sets of projection indexes from the left and right child that are part
    * of equi join keys; the indexes are both in child and Join node schema.<br>
    * 3. Keeps a map of projection indexes that are part of join keys to list of
@@ -275,19 +276,19 @@ public class HiveOptiqUtil {
     }
 
     public List<JoinLeafPredicateInfo> getNonEquiJoinPredicateElements() {
-      return nonEquiJoinPredicateElements;
+      return this.nonEquiJoinPredicateElements;
     }
 
     public List<JoinLeafPredicateInfo> getEquiJoinPredicateElements() {
-      return equiJoinPredicateElements;
+      return this.equiJoinPredicateElements;
     }
 
     public Set<Integer> getProjsFromLeftPartOfJoinKeysInChildSchema() {
-      return projsFromLeftPartOfJoinKeysInChildSchema;
+      return this.projsFromLeftPartOfJoinKeysInChildSchema;
     }
 
     public Set<Integer> getProjsFromRightPartOfJoinKeysInChildSchema() {
-      return projsFromRightPartOfJoinKeysInChildSchema;
+      return this.projsFromRightPartOfJoinKeysInChildSchema;
     }
 
     /**
@@ -296,15 +297,15 @@ public class HiveOptiqUtil {
      * schema.
      */
     public Set<Integer> getProjsFromLeftPartOfJoinKeysInJoinSchema() {
-      return projsFromLeftPartOfJoinKeysInChildSchema;
+      return this.projsFromLeftPartOfJoinKeysInChildSchema;
     }
 
     public Set<Integer> getProjsFromRightPartOfJoinKeysInJoinSchema() {
-      return projsFromRightPartOfJoinKeysInJoinSchema;
+      return this.projsFromRightPartOfJoinKeysInJoinSchema;
     }
 
     public Map<Integer, ImmutableList<JoinLeafPredicateInfo>> getMapOfProjIndxToLeafPInfo() {
-      return mapOfProjIndxInJoinSchemaToLeafPInfo;
+      return this.mapOfProjIndxInJoinSchemaToLeafPInfo;
     }
 
     public static JoinPredicateInfo constructJoinPredicateInfo(HiveJoinRel j) {
@@ -336,7 +337,7 @@ public class HiveOptiqUtil {
         jlpi = JoinLeafPredicateInfo.constructJoinLeafPredicateInfo(j, ce);
 
         // 2.2 Classify leaf predicate as Equi vs Non Equi
-        if (jlpi.m_comparisonType.equals(SqlKind.EQUALS)) {
+        if (jlpi.comparisonType.equals(SqlKind.EQUALS)) {
           equiLPIList.add(jlpi);
         } else {
           nonEquiLPIList.add(jlpi);
@@ -398,38 +399,38 @@ public class HiveOptiqUtil {
    * of equi join keys; the indexes are both in child and Join node schema.<br>
    */
   public static class JoinLeafPredicateInfo {
-    private final SqlKind                m_comparisonType;
-    private final ImmutableList<RexNode> m_joinKeyExprsFromLeft;
-    private final ImmutableList<RexNode> m_joinKeyExprsFromRight;
-    private final ImmutableSet<Integer>  m_projsFromLeftPartOfJoinKeysInChildSchema;
-    private final ImmutableSet<Integer>  m_projsFromRightPartOfJoinKeysInChildSchema;
-    private final ImmutableSet<Integer>  m_projsFromRightPartOfJoinKeysInJoinSchema;
+    private final SqlKind                comparisonType;
+    private final ImmutableList<RexNode> joinKeyExprsFromLeft;
+    private final ImmutableList<RexNode> joinKeyExprsFromRight;
+    private final ImmutableSet<Integer>  projsFromLeftPartOfJoinKeysInChildSchema;
+    private final ImmutableSet<Integer>  projsFromRightPartOfJoinKeysInChildSchema;
+    private final ImmutableSet<Integer>  projsFromRightPartOfJoinKeysInJoinSchema;
 
     public JoinLeafPredicateInfo(SqlKind comparisonType, List<RexNode> joinKeyExprsFromLeft,
         List<RexNode> joinKeyExprsFromRight, Set<Integer> projsFromLeftPartOfJoinKeysInChildSchema,
         Set<Integer> projsFromRightPartOfJoinKeysInChildSchema,
         Set<Integer> projsFromRightPartOfJoinKeysInJoinSchema) {
-      m_comparisonType = comparisonType;
-      m_joinKeyExprsFromLeft = ImmutableList.copyOf(joinKeyExprsFromLeft);
-      m_joinKeyExprsFromRight = ImmutableList.copyOf(joinKeyExprsFromRight);
-      m_projsFromLeftPartOfJoinKeysInChildSchema = ImmutableSet
+      this.comparisonType = comparisonType;
+      this.joinKeyExprsFromLeft = ImmutableList.copyOf(joinKeyExprsFromLeft);
+      this.joinKeyExprsFromRight = ImmutableList.copyOf(joinKeyExprsFromRight);
+      this.projsFromLeftPartOfJoinKeysInChildSchema = ImmutableSet
           .copyOf(projsFromLeftPartOfJoinKeysInChildSchema);
-      m_projsFromRightPartOfJoinKeysInChildSchema = ImmutableSet
+      this.projsFromRightPartOfJoinKeysInChildSchema = ImmutableSet
           .copyOf(projsFromRightPartOfJoinKeysInChildSchema);
-      m_projsFromRightPartOfJoinKeysInJoinSchema = ImmutableSet
+      this.projsFromRightPartOfJoinKeysInJoinSchema = ImmutableSet
           .copyOf(projsFromRightPartOfJoinKeysInJoinSchema);
     }
 
     public List<RexNode> getJoinKeyExprsFromLeft() {
-      return m_joinKeyExprsFromLeft;
+      return this.joinKeyExprsFromLeft;
     }
 
     public List<RexNode> getJoinKeyExprsFromRight() {
-      return m_joinKeyExprsFromRight;
+      return this.joinKeyExprsFromRight;
     }
 
     public Set<Integer> getProjsFromLeftPartOfJoinKeysInChildSchema() {
-      return m_projsFromLeftPartOfJoinKeysInChildSchema;
+      return this.projsFromLeftPartOfJoinKeysInChildSchema;
     }
 
     /**
@@ -438,15 +439,15 @@ public class HiveOptiqUtil {
      * schema.
      */
     public Set<Integer> getProjsFromLeftPartOfJoinKeysInJoinSchema() {
-      return m_projsFromLeftPartOfJoinKeysInChildSchema;
+      return this.projsFromLeftPartOfJoinKeysInChildSchema;
     }
 
     public Set<Integer> getProjsFromRightPartOfJoinKeysInChildSchema() {
-      return m_projsFromRightPartOfJoinKeysInChildSchema;
+      return this.projsFromRightPartOfJoinKeysInChildSchema;
     }
 
     public Set<Integer> getProjsFromRightPartOfJoinKeysInJoinSchema() {
-      return m_projsFromRightPartOfJoinKeysInJoinSchema;
+      return this.projsFromRightPartOfJoinKeysInJoinSchema;
     }
 
     private static JoinLeafPredicateInfo constructJoinLeafPredicateInfo(HiveJoinRel j, RexNode pe) {
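
The new bail-out in validateASTForCBO is a plain token scan over the AST's
string form. A minimal standalone sketch of the same check (hypothetical class
name, plain strings in place of Hive's ASTNode):

    public class CboGuardSketch {
      // Tokens the cost-based optimizer cannot handle yet, as in the commit.
      static final String[] UNSUPPORTED = { "TOK_CHARSETLITERAL", "TOK_TABLESPLITSAMPLE" };

      static boolean validateForCbo(String astTree) {
        for (String token : UNSUPPORTED) {
          if (astTree.contains(token)) {
            return false; // bail out of cost-based optimization
          }
        }
        return true;
      }

      public static void main(String[] args) {
        System.out.println(validateForCbo("(TOK_QUERY (TOK_TABLESPLITSAMPLE ...))")); // false
        System.out.println(validateForCbo("(TOK_QUERY (TOK_SELECT ...))"));           // true
      }
    }

A single pass over the printed tree is coarse but cheap, and enough to disable
CBO for constructs it cannot yet translate.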

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java Thu Oct 30 16:22:33 2014
@@ -17,274 +17,135 @@
  */
 package org.apache.hadoop.hive.ql.optimizer.optiq.rules;
 
-import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
 import java.util.ListIterator;
 
 import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveFilterRel;
-import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveJoinRel;
-import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveRel;
+import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveProjectRel;
 import org.eigenbase.rel.FilterRelBase;
 import org.eigenbase.rel.JoinRelBase;
 import org.eigenbase.rel.JoinRelType;
-import org.eigenbase.rel.RelNode;
+import org.eigenbase.rel.RelFactories;
+import org.eigenbase.rel.rules.PushFilterPastJoinRule;
 import org.eigenbase.relopt.RelOptRule;
 import org.eigenbase.relopt.RelOptRuleCall;
 import org.eigenbase.relopt.RelOptRuleOperand;
-import org.eigenbase.relopt.RelOptUtil;
 import org.eigenbase.relopt.RelOptUtil.InputFinder;
-import org.eigenbase.rex.RexBuilder;
 import org.eigenbase.rex.RexCall;
 import org.eigenbase.rex.RexNode;
-import org.eigenbase.rex.RexUtil;
 import org.eigenbase.sql.SqlKind;
-import org.eigenbase.util.Holder;
 
-public abstract class HivePushFilterPastJoinRule extends RelOptRule {
+public abstract class HivePushFilterPastJoinRule extends PushFilterPastJoinRule {
 
-  public static final HivePushFilterPastJoinRule FILTER_ON_JOIN = new HivePushFilterPastJoinRule(
-      operand(FilterRelBase.class, operand(HiveJoinRel.class, any())),
-      "HivePushFilterPastJoinRule:filter", true) {
-    @Override
-    public void onMatch(RelOptRuleCall call) {
-      HiveFilterRel filter = call.rel(0);
-      HiveJoinRel join = call.rel(1);
-      perform(call, filter, join);
-    }
-  };
-
-  public static final HivePushFilterPastJoinRule JOIN = new HivePushFilterPastJoinRule(
-      operand(HiveJoinRel.class, any()), "HivePushFilterPastJoinRule:no-filter", false) {
-    @Override
-    public void onMatch(RelOptRuleCall call) {
-      HiveJoinRel join = call.rel(0);
-      perform(call, null, join);
-    }
-  };
-
-  /** Whether to try to strengthen join-type. */
-  private final boolean smart;
-
-  // ~ Constructors -----------------------------------------------------------
-
-  /**
-   * Creates a PushFilterPastJoinRule with an explicit root operand.
-   */
-  private HivePushFilterPastJoinRule(RelOptRuleOperand operand, String id, boolean smart) {
-    super(operand, "PushFilterRule: " + id);
-    this.smart = smart;
-  }
-
-  // ~ Methods ----------------------------------------------------------------
-
-  protected void perform(RelOptRuleCall call, FilterRelBase filter,
-      JoinRelBase join) {
-    final List<RexNode> joinFilters = RelOptUtil.conjunctions(join
-        .getCondition());
-
-    /*
-     * todo: hb 6/26/14 for left SemiJoin we cannot push predicates yet. The
-     * assertion that num(JoinRel columns) = num(leftSrc) + num(rightSrc)
-     * doesn't hold. So RelOptUtil.classifyFilters fails.
-     */
-    if (((HiveJoinRel) join).isLeftSemiJoin()) {
-      return;
-    }
-
-    if (filter == null) {
-      // There is only the joinRel
-      // make sure it does not match a cartesian product joinRel
-      // (with "true" condition) otherwise this rule will be applied
-      // again on the new cartesian product joinRel.
-      boolean onlyTrueFilter = true;
-      for (RexNode joinFilter : joinFilters) {
-        if (!joinFilter.isAlwaysTrue()) {
-          onlyTrueFilter = false;
-          break;
-        }
-      }
-
-      if (onlyTrueFilter) {
-        return;
-      }
-    }
-
-    final List<RexNode> aboveFilters = filter != null ? RelOptUtil
-        .conjunctions(filter.getCondition()) : new ArrayList<RexNode>();
-
-    List<RexNode> leftFilters = new ArrayList<RexNode>();
-    List<RexNode> rightFilters = new ArrayList<RexNode>();
-    int origJoinFiltersSz = joinFilters.size();
-
-    // TODO - add logic to derive additional filters. E.g., from
-    // (t1.a = 1 AND t2.a = 2) OR (t1.b = 3 AND t2.b = 4), you can
-    // derive table filters:
-    // (t1.a = 1 OR t1.b = 3)
-    // (t2.a = 2 OR t2.b = 4)
-
-    // Try to push down above filters. These are typically where clause
-    // filters. They can be pushed down if they are not on the NULL
-    // generating side.
-    boolean filterPushed = false;
-    final Holder<JoinRelType> joinTypeHolder = Holder.of(join.getJoinType());
-    if (RelOptUtil.classifyFilters(join, aboveFilters,
-        join.getJoinType(), true, !join.getJoinType().generatesNullsOnLeft(), !join.getJoinType()
-        .generatesNullsOnRight(), joinFilters, leftFilters, rightFilters, joinTypeHolder, smart)) {
-      filterPushed = true;
-    }
-
-    /*
-     * Any predicates pushed down to joinFilters that aren't equality
-     * conditions: put them back as aboveFilters because Hive doesn't support
-     * not equi join conditions.
-     */
-    ListIterator<RexNode> filterIter = joinFilters.listIterator();
-    while (filterIter.hasNext()) {
-      RexNode exp = filterIter.next();
-      if (exp instanceof RexCall) {
-        RexCall c = (RexCall) exp;
-        if (c.getOperator().getKind() == SqlKind.EQUALS) {
-          boolean validHiveJoinFilter = true;
-          for (RexNode rn : c.getOperands()) {
-            // NOTE: Hive dis-allows projections from both left & right side
-            // of join condition. Example: Hive disallows
-            // (r1.x=r2.x)=(r1.y=r2.y) on join condition.
-            if (filterRefersToBothSidesOfJoin(rn, join)) {
-              validHiveJoinFilter = false;
-              break;
-            }
-          }
-          if (validHiveJoinFilter)
-            continue;
-        }
-      }
-      aboveFilters.add(exp);
-      filterIter.remove();
-    }
-
-    /*
-     * if all pushed filters where put back then set filterPushed to false
-     */
-    if (leftFilters.size() == 0 && rightFilters.size() == 0
-        && joinFilters.size() == origJoinFiltersSz) {
-      filterPushed = false;
-    }
-
-    // Try to push down filters in ON clause. A ON clause filter can only be
-    // pushed down if it does not affect the non-matching set, i.e. it is
-    // not on the side which is preserved.
-    if (RelOptUtil.classifyFilters(join, joinFilters, null, false, !join
-        .getJoinType().generatesNullsOnRight(), !join.getJoinType()
-        .generatesNullsOnLeft(), joinFilters, leftFilters, rightFilters, joinTypeHolder, false)) {
-      filterPushed = true;
-    }
-
-    if (!filterPushed) {
-      return;
-    }
-
-    /*
-     * Remove always true conditions that got pushed down.
-     */
-    removeAlwaysTruePredicates(leftFilters);
-    removeAlwaysTruePredicates(rightFilters);
-    removeAlwaysTruePredicates(joinFilters);
-
-    // create FilterRels on top of the children if any filters were
-    // pushed to them
-    RexBuilder rexBuilder = join.getCluster().getRexBuilder();
-    RelNode leftRel = createFilterOnRel(rexBuilder, join.getLeft(), leftFilters);
-    RelNode rightRel = createFilterOnRel(rexBuilder, join.getRight(),
-        rightFilters);
-
-    // create the new join node referencing the new children and
-    // containing its new join filters (if there are any)
-    RexNode joinFilter;
-
-    if (joinFilters.size() == 0) {
-      // if nothing actually got pushed and there is nothing leftover,
-      // then this rule is a no-op
-      if (leftFilters.isEmpty()
-          && rightFilters.isEmpty()
-          && joinTypeHolder.get() == join.getJoinType()) {
-        return;
-      }
-      joinFilter = rexBuilder.makeLiteral(true);
-    } else {
-      joinFilter = RexUtil.composeConjunction(rexBuilder, joinFilters, true);
-    }
-    RelNode newJoinRel = HiveJoinRel.getJoin(join.getCluster(), leftRel,
-        rightRel, joinFilter, join.getJoinType(), false);
-
-    // create a FilterRel on top of the join if needed
-    RelNode newRel = createFilterOnRel(rexBuilder, newJoinRel, aboveFilters);
-
-    call.transformTo(newRel);
-  }
-
-  /**
-   * If the filter list passed in is non-empty, creates a FilterRel on top of
-   * the existing RelNode; otherwise, just returns the RelNode
-   *
-   * @param rexBuilder
-   *          rex builder
-   * @param rel
-   *          the RelNode that the filter will be put on top of
-   * @param filters
-   *          list of filters
-   * @return new RelNode or existing one if no filters
-   */
-  private RelNode createFilterOnRel(RexBuilder rexBuilder, RelNode rel,
-      List<RexNode> filters) {
-    RexNode andFilters = RexUtil.composeConjunction(rexBuilder, filters, false);
-    if (andFilters.isAlwaysTrue()) {
-      return rel;
-    }
-    return new HiveFilterRel(rel.getCluster(), rel.getCluster().traitSetOf(
-        HiveRel.CONVENTION), rel, andFilters);
-  }
-
-  private void removeAlwaysTruePredicates(List<RexNode> predicates) {
-
-    ListIterator<RexNode> iter = predicates.listIterator();
-    while (iter.hasNext()) {
-      RexNode exp = iter.next();
-      if (isAlwaysTrue(exp)) {
-        iter.remove();
-      }
-    }
-  }
-
-  private boolean isAlwaysTrue(RexNode predicate) {
-    if (predicate instanceof RexCall) {
-      RexCall c = (RexCall) predicate;
-      if (c.getOperator().getKind() == SqlKind.EQUALS) {
-        return isAlwaysTrue(c.getOperands().get(0))
-            && isAlwaysTrue(c.getOperands().get(1));
-      }
-    }
-    return predicate.isAlwaysTrue();
-  }
-
-  private boolean filterRefersToBothSidesOfJoin(RexNode filter, JoinRelBase j) {
-    boolean refersToBothSides = false;
-
-    int joinNoOfProjects = j.getRowType().getFieldCount();
-    BitSet filterProjs = new BitSet(joinNoOfProjects);
-    BitSet allLeftProjs = new BitSet(joinNoOfProjects);
-    BitSet allRightProjs = new BitSet(joinNoOfProjects);
-    allLeftProjs.set(0, j.getInput(0).getRowType().getFieldCount(), true);
-    allRightProjs.set(j.getInput(0).getRowType().getFieldCount(), joinNoOfProjects, true);
+	public static final HivePushFilterPastJoinRule FILTER_ON_JOIN = new HivePushFilterIntoJoinRule();
 
-    InputFinder inputFinder = new InputFinder(filterProjs);
-    filter.accept(inputFinder);
+	public static final HivePushFilterPastJoinRule JOIN = new HivePushDownJoinConditionRule();
 
-    if (allLeftProjs.intersects(filterProjs) && allRightProjs.intersects(filterProjs))
-      refersToBothSides = true;
+	/**
+	 * Creates a PushFilterPastJoinRule with an explicit root operand.
+	 */
+	protected HivePushFilterPastJoinRule(RelOptRuleOperand operand, String id,
+			boolean smart, RelFactories.FilterFactory filterFactory,
+			RelFactories.ProjectFactory projectFactory) {
+		super(operand, id, smart, filterFactory, projectFactory);
+	}
+
+	/**
+	 * Rule that tries to push filter expressions into a join condition and into
+	 * the inputs of the join.
+	 */
+	public static class HivePushFilterIntoJoinRule extends
+			HivePushFilterPastJoinRule {
+		public HivePushFilterIntoJoinRule() {
+			super(RelOptRule.operand(FilterRelBase.class,
+					RelOptRule.operand(JoinRelBase.class, RelOptRule.any())),
+					"HivePushFilterPastJoinRule:filter", true,
+					HiveFilterRel.DEFAULT_FILTER_FACTORY,
+					HiveProjectRel.DEFAULT_PROJECT_FACTORY);
+		}
+
+		@Override
+		public void onMatch(RelOptRuleCall call) {
+			FilterRelBase filter = call.rel(0);
+			JoinRelBase join = call.rel(1);
+			super.perform(call, filter, join);
+		}
+	}
+
+	public static class HivePushDownJoinConditionRule extends
+			HivePushFilterPastJoinRule {
+		public HivePushDownJoinConditionRule() {
+			super(RelOptRule.operand(JoinRelBase.class, RelOptRule.any()),
+					"HivePushFilterPastJoinRule:no-filter", true,
+					HiveFilterRel.DEFAULT_FILTER_FACTORY,
+					HiveProjectRel.DEFAULT_PROJECT_FACTORY);
+		}
+
+		@Override
+		public void onMatch(RelOptRuleCall call) {
+			JoinRelBase join = call.rel(0);
+			super.perform(call, null, join);
+		}
+	}
+
+	/*
+	 * Any predicates pushed down to joinFilters that aren't equality
+	 * conditions are put back into aboveFilters, because Hive doesn't
+	 * support non-equi join conditions.
+	 */
+	@Override
+	protected void validateJoinFilters(List<RexNode> aboveFilters,
+			List<RexNode> joinFilters, JoinRelBase join, JoinRelType joinType) {
+		if (joinType.equals(JoinRelType.INNER)) {
+			ListIterator<RexNode> filterIter = joinFilters.listIterator();
+			while (filterIter.hasNext()) {
+				RexNode exp = filterIter.next();
+				if (exp instanceof RexCall) {
+					RexCall c = (RexCall) exp;
+					if (c.getOperator().getKind() == SqlKind.EQUALS) {
+						boolean validHiveJoinFilter = true;
+						for (RexNode rn : c.getOperands()) {
+							// NOTE: Hive disallows projections from both left and
+							// right side of a join condition. Example: Hive disallows
+							// (r1.x=r2.x)=(r1.y=r2.y) in a join condition.
+							if (filterRefersToBothSidesOfJoin(rn, join)) {
+								validHiveJoinFilter = false;
+								break;
+							}
+						}
+						if (validHiveJoinFilter)
+							continue;
+					}
+				}
+				aboveFilters.add(exp);
+				filterIter.remove();
+			}
+		}
+	}
+
+	private boolean filterRefersToBothSidesOfJoin(RexNode filter, JoinRelBase j) {
+		boolean refersToBothSides = false;
+
+		int joinNoOfProjects = j.getRowType().getFieldCount();
+		BitSet filterProjs = new BitSet(joinNoOfProjects);
+		BitSet allLeftProjs = new BitSet(joinNoOfProjects);
+		BitSet allRightProjs = new BitSet(joinNoOfProjects);
+		allLeftProjs.set(0, j.getInput(0).getRowType().getFieldCount(), true);
+		allRightProjs.set(j.getInput(0).getRowType().getFieldCount(),
+				joinNoOfProjects, true);
+
+		InputFinder inputFinder = new InputFinder(filterProjs);
+		filter.accept(inputFinder);
+
+		if (allLeftProjs.intersects(filterProjs)
+				&& allRightProjs.intersects(filterProjs))
+			refersToBothSides = true;
 
-    return refersToBothSides;
-  }
+		return refersToBothSides;
+	}
 }
 
 // End PushFilterPastJoinRule.java
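
The both-sides test retained in filterRefersToBothSidesOfJoin reduces to
BitSet intersection over the join's output columns. A standalone sketch of
that test, with plain Java in place of the Optiq RelNode/InputFinder
machinery (all names here are illustrative):

    import java.util.BitSet;

    public class JoinSideCheckSketch {
      // leftFieldCount: columns contributed by the left input in the join schema.
      static boolean refersToBothSides(BitSet filterCols, int leftFieldCount, int totalFields) {
        BitSet left = new BitSet(totalFields);
        BitSet right = new BitSet(totalFields);
        left.set(0, leftFieldCount);
        right.set(leftFieldCount, totalFields);
        return left.intersects(filterCols) && right.intersects(filterCols);
      }

      public static void main(String[] args) {
        BitSet cols = new BitSet();
        cols.set(1); // a column from the left input
        cols.set(5); // a column from the right input
        System.out.println(refersToBothSides(cols, 4, 8)); // true -> not a valid Hive join filter
      }
    }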

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java Thu Oct 30 16:22:33 2014
@@ -34,13 +34,13 @@ import org.eigenbase.rex.RexVisitorImpl;
 import org.eigenbase.sql.SqlKind;
 
 public class FilterSelectivityEstimator extends RexVisitorImpl<Double> {
-  private final RelNode m_childRel;
-  private final double  m_childCardinality;
+  private final RelNode childRel;
+  private final double  childCardinality;
 
   protected FilterSelectivityEstimator(RelNode childRel) {
     super(true);
-    m_childRel = childRel;
-    m_childCardinality = RelMetadataQuery.getRowCount(m_childRel);
+    this.childRel = childRel;
+    this.childCardinality = RelMetadataQuery.getRowCount(childRel);
   }
 
   public Double estimateSelectivity(RexNode predicate) {
@@ -53,11 +53,10 @@ public class FilterSelectivityEstimator 
     }
 
     /*
-     * Ignore any predicates on partition columns
-     * because we have already accounted for these in
-     * the Table row count.
+     * Ignore any predicates on partition columns because we have already
+     * accounted for these in the Table row count.
      */
-    if (isPartitionPredicate(call, m_childRel)) {
+    if (isPartitionPredicate(call, this.childRel)) {
       return 1.0;
     }
 
@@ -77,6 +76,7 @@ public class FilterSelectivityEstimator 
 
     case NOT_EQUALS: {
       selectivity = computeNotEqualitySelectivity(call);
+      break;
     }
 
     case LESS_THAN_OR_EQUAL:
@@ -150,10 +150,10 @@ public class FilterSelectivityEstimator 
       if (tmpSelectivity == null) {
         tmpSelectivity = 0.99;
       }
-      tmpCardinality = m_childCardinality * tmpSelectivity;
+      tmpCardinality = childCardinality * tmpSelectivity;
 
       if (tmpCardinality > 1)
-        tmpSelectivity = (1 - tmpCardinality / m_childCardinality);
+        tmpSelectivity = (1 - tmpCardinality / childCardinality);
       else
         tmpSelectivity = 1.0;
 
@@ -194,7 +194,7 @@ public class FilterSelectivityEstimator 
 
     for (RexNode op : call.getOperands()) {
       if (op instanceof RexInputRef) {
-        tmpNDV = HiveRelMdDistinctRowCount.getDistinctRowCount(m_childRel,
+        tmpNDV = HiveRelMdDistinctRowCount.getDistinctRowCount(this.childRel,
             ((RexInputRef) op).getIndex());
         if (tmpNDV > maxNDV)
           maxNDV = tmpNDV;
@@ -202,7 +202,7 @@ public class FilterSelectivityEstimator 
         irv = new InputReferencedVisitor();
         irv.apply(op);
         for (Integer childProjIndx : irv.inputPosReferenced) {
-          tmpNDV = HiveRelMdDistinctRowCount.getDistinctRowCount(m_childRel, childProjIndx);
+          tmpNDV = HiveRelMdDistinctRowCount.getDistinctRowCount(this.childRel, childProjIndx);
           if (tmpNDV > maxNDV)
             maxNDV = tmpNDV;
         }
@@ -213,14 +213,13 @@ public class FilterSelectivityEstimator 
   }
 
   private boolean isPartitionPredicate(RexNode expr, RelNode r) {
-    if ( r instanceof ProjectRelBase ) {
+    if (r instanceof ProjectRelBase) {
       expr = RelOptUtil.pushFilterPastProject(expr, (ProjectRelBase) r);
       return isPartitionPredicate(expr, ((ProjectRelBase) r).getChild());
-    } else if ( r instanceof FilterRelBase ) {
+    } else if (r instanceof FilterRelBase) {
       return isPartitionPredicate(expr, ((FilterRelBase) r).getChild());
-    } else if ( r instanceof HiveTableScanRel ) {
-      RelOptHiveTable table = (RelOptHiveTable)
-          ((HiveTableScanRel)r).getTable();
+    } else if (r instanceof HiveTableScanRel) {
+      RelOptHiveTable table = (RelOptHiveTable) ((HiveTableScanRel) r).getTable();
       BitSet cols = RelOptUtil.InputFinder.bits(expr);
       return table.containsPartitionColumnsOnly(cols);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdRowCount.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdRowCount.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdRowCount.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdRowCount.java Thu Oct 30 16:22:33 2014
@@ -28,6 +28,7 @@ import net.hydromatic.optiq.util.BitSets
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveTableScanRel;
 import org.eigenbase.rel.FilterRelBase;
 import org.eigenbase.rel.JoinRelBase;
 import org.eigenbase.rel.JoinRelType;
@@ -41,6 +42,7 @@ import org.eigenbase.rel.metadata.RelMet
 import org.eigenbase.rel.metadata.RelMetadataQuery;
 import org.eigenbase.rel.rules.SemiJoinRel;
 import org.eigenbase.relopt.RelOptUtil;
+import org.eigenbase.relopt.hep.HepRelVertex;
 import org.eigenbase.rex.RexBuilder;
 import org.eigenbase.rex.RexCall;
 import org.eigenbase.rex.RexInputRef;
@@ -190,7 +192,7 @@ public class HiveRelMdRowCount extends R
     RelOptUtil.classifyFilters(joinRel, joinFilters, joinRel.getJoinType(),
         false, !joinRel.getJoinType().generatesNullsOnRight(), !joinRel
             .getJoinType().generatesNullsOnLeft(), joinFilters, leftFilters,
-        rightFilters, joinTypeHolder, false);
+        rightFilters);
 
     Pair<Integer, Integer> joinCols = canHandleJoin(joinRel, leftFilters,
         rightFilters, joinFilters);
@@ -270,10 +272,11 @@ public class HiveRelMdRowCount extends R
     if (pkSide == 0) {
       FKSideInfo fkInfo = new FKSideInfo(rightRowCount,
           rightNDV);
+      double pkSelectivity = pkSelectivity(joinRel, true, left, leftRowCount);
       PKSideInfo pkInfo = new PKSideInfo(leftRowCount,
           leftNDV,
           joinRel.getJoinType().generatesNullsOnRight() ? 1.0 :
-            isPKSideSimpleTree ? RelMetadataQuery.getSelectivity(left, leftPred) : 1.0);
+            pkSelectivity);
 
       return new PKFKRelationInfo(1, fkInfo, pkInfo, ndvScalingFactor, isPKSideSimpleTree);
     }
@@ -281,10 +284,11 @@ public class HiveRelMdRowCount extends R
     if (pkSide == 1) {
       FKSideInfo fkInfo = new FKSideInfo(leftRowCount,
           leftNDV);
+      double pkSelectivity = pkSelectivity(joinRel, false, right, rightRowCount);
       PKSideInfo pkInfo = new PKSideInfo(rightRowCount,
           rightNDV,
           joinRel.getJoinType().generatesNullsOnLeft() ? 1.0 :
-            isPKSideSimpleTree ?  RelMetadataQuery.getSelectivity(right, rightPred) : 1.0);
+            pkSelectivity);
 
       return new PKFKRelationInfo(1, fkInfo, pkInfo, ndvScalingFactor, isPKSideSimpleTree);
     }
@@ -292,6 +296,23 @@ public class HiveRelMdRowCount extends R
     return null;
   }
 
+  private static double pkSelectivity(JoinRelBase joinRel, boolean leftChild,
+      RelNode child,
+      double childRowCount) {
+    if ((leftChild && joinRel.getJoinType().generatesNullsOnRight()) ||
+        (!leftChild && joinRel.getJoinType().generatesNullsOnLeft())) {
+      return 1.0;
+    } else {
+      HiveTableScanRel tScan = HiveRelMdUniqueKeys.getTableScan(child, true);
+      if (tScan != null) {
+        double tRowCount = RelMetadataQuery.getRowCount(tScan);
+        return childRowCount / tRowCount;
+      } else {
+        return 1.0;
+      }
+    }
+  }
+
   private static boolean isKey(BitSet c, RelNode rel) {
     boolean isKey = false;
     Set<BitSet> keys = RelMetadataQuery.getUniqueKeys(rel);
@@ -384,6 +405,10 @@ public class HiveRelMdRowCount extends R
     @Override
     public void visit(RelNode node, int ordinal, RelNode parent) {
 
+      if (node instanceof HepRelVertex) {
+        node = ((HepRelVertex) node).getCurrentRel();
+      }
+
       if (node instanceof TableAccessRelBase) {
         simpleTree = true;
       } else if (node instanceof ProjectRelBase) {
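
The new pkSelectivity helper approximates how much of the primary-key side's
base table survives the filters below the join; unless the PK side is the
preserved side of an outer join, it is the filtered row count over the
base-table row count read from the underlying scan. A simplified sketch
(hypothetical names, plain doubles in place of the metadata queries):

    public class PkSelectivitySketch {
      // pkSidePreserved stands in for the generatesNullsOnLeft/Right checks.
      static double pkSelectivity(boolean pkSidePreserved,
                                  double childRowCount, double baseTableRowCount) {
        if (pkSidePreserved) {
          return 1.0; // the PK side is preserved by the outer join; no reduction
        }
        return childRowCount / baseTableRowCount;
      }

      public static void main(String[] args) {
        // Filters kept 120k of 1M PK rows: the join output shrinks proportionally.
        System.out.println(pkSelectivity(false, 120_000, 1_000_000)); // 0.12
      }
    }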

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdSelectivity.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdSelectivity.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdSelectivity.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdSelectivity.java Thu Oct 30 16:22:33 2014
@@ -37,6 +37,7 @@ import org.eigenbase.rel.metadata.RelMet
 import org.eigenbase.rel.metadata.RelMetadataQuery;
 import org.eigenbase.rex.RexNode;
 import org.eigenbase.rex.RexUtil;
+import org.eigenbase.util.Pair;
 
 import com.google.common.collect.ImmutableMap;
 
@@ -67,7 +68,15 @@ public class HiveRelMdSelectivity extend
 
   private Double computeInnerJoinSelectivity(HiveJoinRel j, RexNode predicate) {
     double ndvCrossProduct = 1;
-    RexNode combinedPredicate = getCombinedPredicateForJoin(j, predicate);
+    Pair<Boolean, RexNode> predInfo =
+        getCombinedPredicateForJoin(j, predicate);
+    if (!predInfo.getKey()) {
+      return
+          new FilterSelectivityEstimator(j).
+          estimateSelectivity(predInfo.getValue());
+    }
+
+    RexNode combinedPredicate = predInfo.getValue();
     JoinPredicateInfo jpi = JoinPredicateInfo.constructJoinPredicateInfo(j,
         combinedPredicate);
     ImmutableMap.Builder<Integer, Double> colStatMapBuilder = ImmutableMap
@@ -175,7 +184,14 @@ public class HiveRelMdSelectivity extend
     return ndvCrossProduct;
   }
 
-  private RexNode getCombinedPredicateForJoin(HiveJoinRel j, RexNode additionalPredicate) {
+  /**
+   * @param j the join
+   * @param additionalPredicate predicate beyond the join condition
+   * @return (true, joinCond) if the predicate is just the join condition;
+   *         (false, minusPred) otherwise, where minusPred is the leftover
+   *         predicate after removing the join condition
+   */
+  private Pair<Boolean,RexNode> getCombinedPredicateForJoin(HiveJoinRel j, RexNode additionalPredicate) {
     RexNode minusPred = RelMdUtil.minusPreds(j.getCluster().getRexBuilder(), additionalPredicate,
         j.getCondition());
 
@@ -184,10 +200,10 @@ public class HiveRelMdSelectivity extend
       minusList.add(j.getCondition());
       minusList.add(minusPred);
 
-      return RexUtil.composeConjunction(j.getCluster().getRexBuilder(), minusList, true);
+      return new Pair<Boolean,RexNode>(false, minusPred);
     }
 
-    return j.getCondition();
+    return new Pair<Boolean,RexNode>(true, j.getCondition());
   }
 
   /**
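
getCombinedPredicateForJoin now returns a (Boolean, RexNode) pair so the
caller can route a pure join condition to join-specific estimation and send
only the leftover predicate through FilterSelectivityEstimator. A plain-Java
sketch of that dispatch, modeling conjunctions as sets of strings (the set
difference stands in for RelMdUtil.minusPreds; names are illustrative):

    import java.util.AbstractMap.SimpleEntry;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class JoinPredicateDispatchSketch {
      static Map.Entry<Boolean, Set<String>> combinedPredicate(Set<String> pred,
          Set<String> joinCond) {
        if (pred.equals(joinCond)) {
          return new SimpleEntry<>(true, joinCond);  // pure join condition: join path
        }
        Set<String> minus = new HashSet<>(pred);
        minus.removeAll(joinCond);                   // leftover goes to the filter path
        return new SimpleEntry<>(false, minus);
      }

      public static void main(String[] args) {
        Set<String> joinCond = new HashSet<>(Arrays.asList("a = b"));
        Set<String> pred = new HashSet<>(Arrays.asList("a = b", "c > 5"));
        System.out.println(combinedPredicate(pred, joinCond));     // false=[c > 5]
        System.out.println(combinedPredicate(joinCond, joinCond)); // true=[a = b]
      }
    }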

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdUniqueKeys.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdUniqueKeys.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdUniqueKeys.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/HiveRelMdUniqueKeys.java Thu Oct 30 16:22:33 2014
@@ -30,6 +30,7 @@ import net.hydromatic.optiq.util.BitSets
 
 import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveTableScanRel;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
+import org.eigenbase.rel.FilterRelBase;
 import org.eigenbase.rel.ProjectRelBase;
 import org.eigenbase.rel.RelNode;
 import org.eigenbase.rel.metadata.BuiltInMetadata;
@@ -37,6 +38,7 @@ import org.eigenbase.rel.metadata.Metada
 import org.eigenbase.rel.metadata.ReflectiveRelMetadataProvider;
 import org.eigenbase.rel.metadata.RelMdUniqueKeys;
 import org.eigenbase.rel.metadata.RelMetadataProvider;
+import org.eigenbase.relopt.hep.HepRelVertex;
 import org.eigenbase.rex.RexInputRef;
 import org.eigenbase.rex.RexNode;
 
@@ -59,16 +61,15 @@ public class HiveRelMdUniqueKeys {
    */
   public Set<BitSet> getUniqueKeys(ProjectRelBase rel, boolean ignoreNulls) {
 
-    RelNode child = rel.getChild();
+    HiveTableScanRel tScan = getTableScan(rel.getChild(), false);
 
-    if (!(child instanceof HiveTableScanRel)) {
+    if (tScan == null) {
       Function<RelNode, Metadata> fn = RelMdUniqueKeys.SOURCE.apply(
           rel.getClass(), BuiltInMetadata.UniqueKeys.class);
       return ((BuiltInMetadata.UniqueKeys) fn.apply(rel))
           .getUniqueKeys(ignoreNulls);
     }
 
-    HiveTableScanRel tScan = (HiveTableScanRel) child;
     Map<Integer, Integer> posMap = new HashMap<Integer, Integer>();
     int projectPos = 0;
     int colStatsPos = 0;
@@ -112,4 +113,26 @@ public class HiveRelMdUniqueKeys {
     return keys;
   }
 
+  /*
+   * Traverse a path of Filters and Projects to get to the TableScan.
+   * When computing unique keys, stop on reaching a Project; it will be
+   * handled by the invocation on that Project.
+   * When computing the base row count of a path, keep going past Projects.
+   */
+  static HiveTableScanRel getTableScan(RelNode r, boolean traverseProject) {
+
+    while (r != null && !(r instanceof HiveTableScanRel)) {
+      if (r instanceof HepRelVertex) {
+        r = ((HepRelVertex) r).getCurrentRel();
+      } else if (r instanceof FilterRelBase) {
+        r = ((FilterRelBase) r).getChild();
+      } else if (traverseProject && r instanceof ProjectRelBase) {
+        r = ((ProjectRelBase) r).getChild();
+      } else {
+        r = null;
+      }
+    }
+    return r == null ? null : (HiveTableScanRel) r;
+  }
+
 }
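
The new getTableScan helper descends through single-input wrappers until it
finds the scan, unwrapping HepRelVertex and optionally stepping through
projections. A standalone sketch with hypothetical stand-in node classes
(HepRelVertex unwrapping omitted for brevity):

    public class ScanFinderSketch {
      interface Node { Node child(); }
      static class Scan implements Node { public Node child() { return null; } }
      static class Filter implements Node {
        final Node c; Filter(Node c) { this.c = c; } public Node child() { return c; }
      }
      static class Project implements Node {
        final Node c; Project(Node c) { this.c = c; } public Node child() { return c; }
      }

      static Scan findScan(Node r, boolean traverseProject) {
        while (r != null && !(r instanceof Scan)) {
          if (r instanceof Filter) {
            r = r.child();
          } else if (traverseProject && r instanceof Project) {
            r = r.child();
          } else {
            r = null; // any other operator ends the walk
          }
        }
        return (Scan) r;
      }

      public static void main(String[] args) {
        Node plan = new Filter(new Project(new Scan()));
        System.out.println(findScan(plan, true));  // reaches the Scan
        System.out.println(findScan(plan, false)); // null: stops at the Project
      }
    }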

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTBuilder.java Thu Oct 30 16:22:33 2014
@@ -136,6 +136,10 @@ class ASTBuilder {
   }
 
   static ASTNode literal(RexLiteral literal) {
+    return literal(literal, false);
+  }
+
+  static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
     Object val = null;
     int type = 0;
     SqlTypeName sqlType = literal.getType().getSqlTypeName();
@@ -147,18 +151,33 @@ class ASTBuilder {
       type = HiveParser.BigintLiteral;
       break;
     case TINYINT:
-      val = literal.getValue3();
+      if (useTypeQualInLiteral) {
+        val = literal.getValue3() + "Y";
+      } else {
+        val = literal.getValue3();
+      }
       type = HiveParser.TinyintLiteral;
       break;
     case SMALLINT:
-      val = literal.getValue3();
+      if (useTypeQualInLiteral) {
+        val = literal.getValue3() + "S";
+      } else {
+        val = literal.getValue3();
+      }
       type = HiveParser.SmallintLiteral;
       break;
     case INTEGER:
-    case BIGINT:
       val = literal.getValue3();
       type = HiveParser.BigintLiteral;
       break;
+    case BIGINT:
+      if (useTypeQualInLiteral) {
+        val = literal.getValue3() + "L";
+      } else {
+        val = literal.getValue3();
+      }
+      type = HiveParser.BigintLiteral;
+      break;
     case DOUBLE:
       val = literal.getValue3() + "D";
       type = HiveParser.Number;
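
The qualifiers added here reattach Hive's literal suffixes (Y, S, L; D for
doubles) so a regenerated query parses back to the same types. A minimal
sketch of the suffixing with an illustrative enum in place of SqlTypeName;
as in the commit, DOUBLE is always suffixed while the integral suffixes are
gated on the flag:

    public class LiteralSuffixSketch {
      enum SqlType { TINYINT, SMALLINT, INTEGER, BIGINT, DOUBLE }

      static String render(long value, SqlType type, boolean useTypeQual) {
        if (type == SqlType.DOUBLE) {
          return value + "D";                         // always carries D
        }
        if (!useTypeQual) {
          return Long.toString(value);
        }
        switch (type) {
          case TINYINT:  return value + "Y";
          case SMALLINT: return value + "S";
          case BIGINT:   return value + "L";
          default:       return Long.toString(value); // INTEGER has no suffix
        }
      }

      public static void main(String[] args) {
        System.out.println(render(7, SqlType.TINYINT, true));  // 7Y
        System.out.println(render(7, SqlType.BIGINT, false));  // 7
      }
    }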

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java Thu Oct 30 16:22:33 2014
@@ -25,6 +25,8 @@ import java.util.Map;
 
 import net.hydromatic.optiq.util.BitSets;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.optimizer.optiq.OptiqSemanticException;
 import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
@@ -63,6 +65,7 @@ import org.eigenbase.sql.type.SqlTypeNam
 import com.google.common.collect.Iterables;
 
 public class ASTConverter {
+  private static final Log LOG = LogFactory.getLog(ASTConverter.class);
 
   private RelNode          root;
   private HiveAST          hiveAST;
@@ -149,8 +152,9 @@ public class ASTConverter {
       int i = 0;
 
       for (RexNode r : select.getChildExps()) {
-        ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
-            .getRowType().getFieldNames().get(i++));
+        ASTNode selectExpr = ASTBuilder.selectExpr(
+            r.accept(new RexVisitor(schema, r instanceof RexLiteral)),
+            select.getRowType().getFieldNames().get(i++));
         b.add(selectExpr);
       }
     }
@@ -329,10 +333,16 @@ public class ASTConverter {
   static class RexVisitor extends RexVisitorImpl<ASTNode> {
 
     private final Schema schema;
+    private boolean useTypeQualInLiteral;
 
     protected RexVisitor(Schema schema) {
+      this(schema, false);
+    }
+
+    protected RexVisitor(Schema schema, boolean useTypeQualInLiteral) {
       super(true);
       this.schema = schema;
+      this.useTypeQualInLiteral = useTypeQualInLiteral;
     }
 
     @Override
@@ -357,7 +367,7 @@ public class ASTConverter {
 
     @Override
     public ASTNode visitLiteral(RexLiteral literal) {
-      return ASTBuilder.literal(literal);
+      return ASTBuilder.literal(literal, useTypeQualInLiteral);
     }
 
     private ASTNode getPSpecAST(RexWindow window) {
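
ASTConverter turns the qualifier on only when a projected expression is
itself a RexLiteral, so bare constants in the select list keep their exact
types while computed expressions are rendered as before. A sketch of that
per-expression flag, with stand-in objects replacing RexNode (all names
hypothetical):

    import java.util.Arrays;
    import java.util.List;

    public class PerExprVisitorSketch {
      static class RexVisitor {
        final boolean useTypeQualInLiteral;
        RexVisitor(boolean useTypeQualInLiteral) { this.useTypeQualInLiteral = useTypeQualInLiteral; }
      }

      public static void main(String[] args) {
        // Stand-ins for select expressions: a bare literal and a column reference.
        List<Object> selectExprs = Arrays.asList((Object) 5L, "col_ref");
        for (Object r : selectExprs) {
          RexVisitor v = new RexVisitor(r instanceof Long); // mirrors r instanceof RexLiteral
          System.out.println(r + " -> useTypeQualInLiteral=" + v.useTypeQualInLiteral);
        }
      }
    }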

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/PlanModifierForASTConv.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/PlanModifierForASTConv.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/PlanModifierForASTConv.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/PlanModifierForASTConv.java Thu Oct 30 16:22:33 2014
@@ -22,13 +22,18 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveOptiqUtil;
 import org.apache.hadoop.hive.ql.optimizer.optiq.OptiqSemanticException;
 import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveAggregateRel;
 import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveProjectRel;
 import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveSortRel;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.eigenbase.rel.AggregateCall;
 import org.eigenbase.rel.AggregateRelBase;
+import org.eigenbase.rel.Aggregation;
 import org.eigenbase.rel.EmptyRel;
 import org.eigenbase.rel.FilterRelBase;
 import org.eigenbase.rel.JoinRelBase;
@@ -40,31 +45,52 @@ import org.eigenbase.rel.SetOpRel;
 import org.eigenbase.rel.SingleRel;
 import org.eigenbase.rel.SortRel;
 import org.eigenbase.rel.rules.MultiJoinRel;
+import org.eigenbase.relopt.RelOptUtil;
 import org.eigenbase.relopt.hep.HepRelVertex;
 import org.eigenbase.relopt.volcano.RelSubset;
 import org.eigenbase.reltype.RelDataType;
+import org.eigenbase.reltype.RelDataTypeFactory;
 import org.eigenbase.rex.RexNode;
+import org.eigenbase.sql.SqlKind;
 import org.eigenbase.util.Pair;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 
 public class PlanModifierForASTConv {
+  private static final Log LOG = LogFactory.getLog(PlanModifierForASTConv.class);
 
   public static RelNode convertOpTree(RelNode rel, List<FieldSchema> resultSchema)
       throws OptiqSemanticException {
     RelNode newTopNode = rel;
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Original plan for PlanModifier\n " + RelOptUtil.toString(newTopNode));
+    }
 
     if (!(newTopNode instanceof ProjectRelBase) && !(newTopNode instanceof SortRel)) {
       newTopNode = introduceDerivedTable(newTopNode);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Plan after top-level introduceDerivedTable\n "
+            + RelOptUtil.toString(newTopNode));
+      }
     }
 
     convertOpTree(newTopNode, (RelNode) null);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Plan after nested convertOpTree\n " + RelOptUtil.toString(newTopNode));
+    }
 
     Pair<RelNode, RelNode> topSelparentPair = HiveOptiqUtil.getTopLevelSelect(newTopNode);
     fixTopOBSchema(newTopNode, topSelparentPair, resultSchema);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Plan after fixTopOBSchema\n " + RelOptUtil.toString(newTopNode));
+    }
+
     topSelparentPair = HiveOptiqUtil.getTopLevelSelect(newTopNode);
     newTopNode = renameTopLevelSelectInResultSchema(newTopNode, topSelparentPair, resultSchema);
-
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Final plan after modifier\n " + RelOptUtil.toString(newTopNode));
+    }
     return newTopNode;
   }
 
@@ -108,8 +134,14 @@ public class PlanModifierForASTConv {
           introduceDerivedTable(((HiveSortRel) rel).getChild(), rel);
         }
       } else if (rel instanceof HiveAggregateRel) {
+        RelNode newParent = parent;
         if (!validGBParent(rel, parent)) {
-          introduceDerivedTable(rel, parent);
+          newParent = introduceDerivedTable(rel, parent);
+        }
+        // check if the group-by is empty and there are no aggregate calls;
+        // this should only happen when newParent projects only constants.
+        if (isEmptyGrpAggr(rel)) {
+          replaceEmptyGroupAggr(rel, newParent);
         }
       }
     }
@@ -125,36 +157,54 @@ public class PlanModifierForASTConv {
   private static void fixTopOBSchema(final RelNode rootRel,
       Pair<RelNode, RelNode> topSelparentPair, List<FieldSchema> resultSchema)
       throws OptiqSemanticException {
-    if (topSelparentPair.getKey() instanceof SortRel
-        && HiveOptiqUtil.orderRelNode(topSelparentPair.getKey())) {
-      HiveSortRel obRel = (HiveSortRel) topSelparentPair.getKey();
-      ProjectRelBase obChild = (ProjectRelBase) topSelparentPair.getValue();
-
-      if (obChild.getRowType().getFieldCount() > resultSchema.size()) {
-        RelDataType rt = obChild.getRowType();
-        Set<Integer> collationInputRefs = new HashSet(RelCollationImpl.ordinals(obRel
-            .getCollation()));
-        ImmutableMap.Builder<Integer, RexNode> inputRefToCallMapBldr = ImmutableMap.builder();
-        for (int i = resultSchema.size(); i < rt.getFieldCount(); i++) {
-          if (collationInputRefs.contains(i)) {
-            inputRefToCallMapBldr.put(i, obChild.getChildExps().get(i));
-          }
-        }
-
-        ImmutableMap<Integer, RexNode> inputRefToCallMap = inputRefToCallMapBldr.build();
-        if ((obChild.getRowType().getFieldCount() - inputRefToCallMap.size()) == resultSchema
-            .size()) {
-          HiveProjectRel replacementProjectRel = HiveProjectRel.create(obChild.getChild(), obChild
-              .getChildExps().subList(0, resultSchema.size()), obChild.getRowType().getFieldNames()
-              .subList(0, resultSchema.size()));
-          obRel.replaceInput(0, replacementProjectRel);
-          obRel.setInputRefToCallMap(inputRefToCallMap);
-        } else {
-          throw new OptiqSemanticException(
-              "Result Schema didn't match Optiq Optimized Op Tree Schema");
-        }
+    if (!(topSelparentPair.getKey() instanceof SortRel)
+        || !HiveOptiqUtil.orderRelNode(topSelparentPair.getKey())) {
+      return;
+    }
+    HiveSortRel obRel = (HiveSortRel) topSelparentPair.getKey();
+    ProjectRelBase obChild = (ProjectRelBase) topSelparentPair.getValue();
+    if (obChild.getRowType().getFieldCount() <= resultSchema.size()) {
+      return;
+    }
+
+    RelDataType rt = obChild.getRowType();
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    Set<Integer> collationInputRefs = new HashSet(
+        RelCollationImpl.ordinals(obRel.getCollation()));
+    ImmutableMap.Builder<Integer, RexNode> inputRefToCallMapBldr = ImmutableMap.builder();
+    for (int i = resultSchema.size(); i < rt.getFieldCount(); i++) {
+      if (collationInputRefs.contains(i)) {
+        inputRefToCallMapBldr.put(i, obChild.getChildExps().get(i));
       }
     }
+    ImmutableMap<Integer, RexNode> inputRefToCallMap = inputRefToCallMapBldr.build();
+
+    if ((obChild.getRowType().getFieldCount() - inputRefToCallMap.size()) != resultSchema.size()) {
+      LOG.error(generateInvalidSchemaMessage(obChild, resultSchema, inputRefToCallMap.size()));
+      throw new OptiqSemanticException("Result Schema didn't match Optimized Op Tree Schema");
+    }
+    // This removes order-by only expressions from the projections.
+    HiveProjectRel replacementProjectRel = HiveProjectRel.create(obChild.getChild(), obChild
+        .getChildExps().subList(0, resultSchema.size()), obChild.getRowType().getFieldNames()
+        .subList(0, resultSchema.size()));
+    obRel.replaceInput(0, replacementProjectRel);
+    obRel.setInputRefToCallMap(inputRefToCallMap);
+  }
+
+  private static String generateInvalidSchemaMessage(ProjectRelBase topLevelProj,
+      List<FieldSchema> resultSchema, int fieldsForOB) {
+    String errorDesc = "Result Schema didn't match Optiq Optimized Op Tree; schema: ";
+    for (FieldSchema fs : resultSchema) {
+      errorDesc += "[" + fs.getName() + ":" + fs.getType() + "], ";
+    }
+    errorDesc += " projection fields: ";
+    for (RexNode exp : topLevelProj.getChildExps()) {
+      errorDesc += "[" + exp.toString() + ":" + exp.getType() + "], ";
+    }
+    if (fieldsForOB != 0) {
+      errorDesc += fieldsForOB + " fields removed due to ORDER BY  ";
+    }
+    return errorDesc.substring(0, errorDesc.length() - 2);
   }
 
   private static RelNode renameTopLevelSelectInResultSchema(final RelNode rootRel,
@@ -167,22 +217,18 @@ public class PlanModifierForASTConv {
     // (limit)?(OB)?(ProjectRelBase)....
     List<RexNode> rootChildExps = originalProjRel.getChildExps();
     if (resultSchema.size() != rootChildExps.size()) {
-      // this is a bug in Hive where for queries like select key,value,value
-      // convertRowSchemaToResultSetSchema() only returns schema containing
-      // key,value
-      // Underlying issue is much deeper because it seems like RowResolver
-      // itself doesnt have
-      // those mappings. see limit_pushdown.q & limit_pushdown_negative.q
-      // Till Hive issue is fixed, disable CBO for such queries.
-      throw new OptiqSemanticException("Result Schema didn't match Optiq Optimized Op Tree Schema");
+      // Safeguard against potential issues in CBO RowResolver construction. Disable CBO for now.
+      LOG.error(generateInvalidSchemaMessage(originalProjRel, resultSchema, 0));
+      throw new OptiqSemanticException("Result Schema didn't match Optimized Op Tree Schema");
     }
 
     List<String> newSelAliases = new ArrayList<String>();
     String colAlias;
     for (int i = 0; i < rootChildExps.size(); i++) {
       colAlias = resultSchema.get(i).getName();
-      if (colAlias.startsWith("_"))
+      if (colAlias.startsWith("_")) {
         colAlias = colAlias.substring(1);
+      }
       newSelAliases.add(colAlias);
     }
 
@@ -206,7 +252,7 @@ public class PlanModifierForASTConv {
     return select;
   }
 
-  private static void introduceDerivedTable(final RelNode rel, RelNode parent) {
+  private static RelNode introduceDerivedTable(final RelNode rel, RelNode parent) {
     int i = 0;
     int pos = -1;
     List<RelNode> childList = parent.getInputs();
@@ -226,6 +272,8 @@ public class PlanModifierForASTConv {
     RelNode select = introduceDerivedTable(rel);
 
     parent.replaceInput(pos, select);
+    
+    return select;
   }
 
   private static boolean validJoinParent(RelNode joinNode, RelNode parent) {
@@ -310,4 +358,40 @@ public class PlanModifierForASTConv {
 
     return validChild;
   }
+  
+  private static boolean isEmptyGrpAggr(RelNode gbNode) {
+    // Verify that both the group set and the aggregate call list are empty
+    AggregateRelBase aggrnode = (AggregateRelBase) gbNode;
+    if (aggrnode.getGroupSet().isEmpty() && aggrnode.getAggCallList().isEmpty()) {
+      return true;
+    }
+    return false;
+  }
+  
+  private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) {
+    // If this function is called, the parent should only include constants
+    List<RexNode> exps = parent.getChildExps();
+    for (RexNode rexNode : exps) {
+      if (rexNode.getKind() != SqlKind.LITERAL) {
+        throw new RuntimeException("We expect " + parent.toString()
+            + " to contain only constants. However, " + rexNode.toString() + " is "
+            + rexNode.getKind());
+      }
+    }
+    HiveAggregateRel oldAggRel = (HiveAggregateRel) rel;
+    RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory();
+    RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory);
+    RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory);
+    // Create the dummy aggregation.
+    Aggregation countFn = (Aggregation) SqlFunctionConverter.getOptiqAggFn("count",
+        ImmutableList.of(intType), longType);
+    // TODO: Using 0 might be wrong; might need to walk down to find the
+    // proper index of a dummy.
+    List<Integer> argList = ImmutableList.of(0);
+    AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null);
+    AggregateRelBase newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getChild(),
+        oldAggRel.getGroupSet(), ImmutableList.of(dummyCall));
+    RelNode select = introduceDerivedTable(newAggRel);
+    parent.replaceInput(0, select);
+  }
 }
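
replaceEmptyGroupAggr deals with an aggregate that has an empty group set and
no aggregate calls, which cannot be rendered back to SQL; the commit swaps in
a dummy count. A simplified sketch with plain-Java stand-ins for the Rel
classes (names are illustrative):

    import java.util.Collections;
    import java.util.List;

    public class EmptyGroupAggrSketch {
      static class Aggregate {
        final List<Integer> groupSet;
        final List<String> aggCalls;
        Aggregate(List<Integer> g, List<String> a) { groupSet = g; aggCalls = a; }
      }

      static boolean isEmptyGrpAggr(Aggregate a) {
        return a.groupSet.isEmpty() && a.aggCalls.isEmpty();
      }

      static Aggregate replaceIfEmpty(Aggregate a) {
        if (!isEmptyGrpAggr(a)) {
          return a;
        }
        // Mirrors the commit: substitute count on column 0 as a placeholder.
        return new Aggregate(a.groupSet, Collections.singletonList("count($0)"));
      }

      public static void main(String[] args) {
        Aggregate empty = new Aggregate(Collections.<Integer>emptyList(),
            Collections.<String>emptyList());
        System.out.println(replaceIfEmpty(empty).aggCalls); // [count($0)]
      }
    }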

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/RexNodeConverter.java Thu Oct 30 16:22:33 2014
@@ -82,35 +82,35 @@ public class RexNodeConverter {
   private static final Log LOG = LogFactory.getLog(RexNodeConverter.class);
 
   private static class InputCtx {
-    private final RelDataType                   m_optiqInpDataType;
-    private final ImmutableMap<String, Integer> m_hiveNameToPosMap;
-    private final RowResolver                   m_hiveRR;
-    private final int                           m_offsetInOptiqSchema;
+    private final RelDataType                   optiqInpDataType;
+    private final ImmutableMap<String, Integer> hiveNameToPosMap;
+    private final RowResolver                   hiveRR;
+    private final int                           offsetInOptiqSchema;
 
     private InputCtx(RelDataType optiqInpDataType, ImmutableMap<String, Integer> hiveNameToPosMap,
         RowResolver hiveRR, int offsetInOptiqSchema) {
-      m_optiqInpDataType = optiqInpDataType;
-      m_hiveNameToPosMap = hiveNameToPosMap;
-      m_hiveRR = hiveRR;
-      m_offsetInOptiqSchema = offsetInOptiqSchema;
+      this.optiqInpDataType = optiqInpDataType;
+      this.hiveNameToPosMap = hiveNameToPosMap;
+      this.hiveRR = hiveRR;
+      this.offsetInOptiqSchema = offsetInOptiqSchema;
     }
   };
 
-  private final RelOptCluster           m_cluster;
-  private final ImmutableList<InputCtx> m_inputCtxs;
-  private final boolean                 m_flattenExpr;
+  private final RelOptCluster           cluster;
+  private final ImmutableList<InputCtx> inputCtxs;
+  private final boolean                 flattenExpr;
 
   public RexNodeConverter(RelOptCluster cluster, RelDataType inpDataType,
       ImmutableMap<String, Integer> nameToPosMap, int offset, boolean flattenExpr) {
-    this.m_cluster = cluster;
-    m_inputCtxs = ImmutableList.of(new InputCtx(inpDataType, nameToPosMap, null, offset));
-    m_flattenExpr = flattenExpr;
+    this.cluster = cluster;
+    this.inputCtxs = ImmutableList.of(new InputCtx(inpDataType, nameToPosMap, null, offset));
+    this.flattenExpr = flattenExpr;
   }
 
   public RexNodeConverter(RelOptCluster cluster, List<InputCtx> inpCtxLst, boolean flattenExpr) {
-    this.m_cluster = cluster;
-    m_inputCtxs = ImmutableList.<InputCtx> builder().addAll(inpCtxLst).build();
-    m_flattenExpr = flattenExpr;
+    this.cluster = cluster;
+    this.inputCtxs = ImmutableList.<InputCtx> builder().addAll(inpCtxLst).build();
+    this.flattenExpr = flattenExpr;
   }
 
   public RexNode convert(ExprNodeDesc expr) throws SemanticException {
@@ -134,7 +134,7 @@ public class RexNodeConverter {
     RexNode rexNode = convert(fieldDesc.getDesc());
     if (rexNode instanceof RexCall) {
       // regular case of accessing nested field in a column
-      return m_cluster.getRexBuilder().makeFieldAccess(rexNode, fieldDesc.getFieldName(), true);
+      return cluster.getRexBuilder().makeFieldAccess(rexNode, fieldDesc.getFieldName(), true);
     } else {
       // This may happen for schema-less tables, where columns are dynamically
       // supplied by serdes.
@@ -184,7 +184,7 @@ public class RexNodeConverter {
         }
 
       }
-      argTypeBldr.add(TypeConverter.convert(tmpExprNode.getTypeInfo(), m_cluster.getTypeFactory()));
+      argTypeBldr.add(TypeConverter.convert(tmpExprNode.getTypeInfo(), cluster.getTypeFactory()));
       tmpRN = convert(tmpExprNode);
       childRexNodeLst.add(tmpRN);
     }
@@ -196,20 +196,20 @@ public class RexNodeConverter {
 
     if (expr == null) {
       // This is not a cast; process the function.
-      retType = TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory());
+      retType = TypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
       SqlOperator optiqOp = SqlFunctionConverter.getOptiqOperator(func.getFuncText(),
           func.getGenericUDF(), argTypeBldr.build(), retType);
-      expr = m_cluster.getRexBuilder().makeCall(optiqOp, childRexNodeLst);
+      expr = cluster.getRexBuilder().makeCall(optiqOp, childRexNodeLst);
     } else {
       retType = expr.getType();
     }
 
    // TODO: the cast function in Optiq has a bug where inferring the type of
    // a cast throws an exception
-    if (m_flattenExpr && (expr instanceof RexCall)
+    if (flattenExpr && (expr instanceof RexCall)
         && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) {
       RexCall call = (RexCall) expr;
-      expr = m_cluster.getRexBuilder().makeCall(retType, call.getOperator(),
+      expr = cluster.getRexBuilder().makeCall(retType, call.getOperator(),
           RexUtil.flatten(call.getOperands(), call.getOperator()));
     }
 
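For illustration, a sketch of the flattening step in the hunk above, assuming
Optiq's RexUtil.flatten merges nested calls of the same operator (a, b and c
are hypothetical RexNodes):

    // Before: AND(AND(a, b), c)
    RexCall nested = (RexCall) rexBuilder.makeCall(SqlStdOperatorTable.AND,
        rexBuilder.makeCall(SqlStdOperatorTable.AND, a, b), c);
    // After: AND(a, b, c) -- same operator and return type, one operand list
    RexNode flat = rexBuilder.makeCall(nested.getType(), nested.getOperator(),
        RexUtil.flatten(nested.getOperands(), nested.getOperator()));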
@@ -246,8 +246,8 @@ public class RexNodeConverter {
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
           || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
-        castExpr = m_cluster.getRexBuilder().makeAbstractCast(
-            TypeConverter.convert(func.getTypeInfo(), m_cluster.getTypeFactory()),
+        castExpr = cluster.getRexBuilder().makeAbstractCast(
+            TypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory()),
             childRexNodeLst.get(0));
       }
     }
@@ -258,15 +258,15 @@ public class RexNodeConverter {
   private InputCtx getInputCtx(ExprNodeColumnDesc col) throws SemanticException {
     InputCtx ctxLookingFor = null;
 
-    if (m_inputCtxs.size() == 1) {
-      ctxLookingFor = m_inputCtxs.get(0);
+    if (inputCtxs.size() == 1) {
+      ctxLookingFor = inputCtxs.get(0);
     } else {
       String tableAlias = col.getTabAlias();
       String colAlias = col.getColumn();
       int noInp = 0;
-      for (InputCtx ic : m_inputCtxs) {
-        if (tableAlias == null || ic.m_hiveRR.hasTableAlias(tableAlias)) {
-          if (ic.m_hiveRR.getPosition(colAlias) >= 0) {
+      for (InputCtx ic : inputCtxs) {
+        if (tableAlias == null || ic.hiveRR.hasTableAlias(tableAlias)) {
+          if (ic.hiveRR.getPosition(colAlias) >= 0) {
             ctxLookingFor = ic;
             noInp++;
           }
@@ -282,16 +282,16 @@ public class RexNodeConverter {
 
   protected RexNode convert(ExprNodeColumnDesc col) throws SemanticException {
     InputCtx ic = getInputCtx(col);
-    int pos = ic.m_hiveNameToPosMap.get(col.getColumn());
-    return m_cluster.getRexBuilder().makeInputRef(
-        ic.m_optiqInpDataType.getFieldList().get(pos).getType(), pos + ic.m_offsetInOptiqSchema);
+    int pos = ic.hiveNameToPosMap.get(col.getColumn());
+    return cluster.getRexBuilder().makeInputRef(
+        ic.optiqInpDataType.getFieldList().get(pos).getType(), pos + ic.offsetInOptiqSchema);
   }
 
   private static final BigInteger MIN_LONG_BI = BigInteger.valueOf(Long.MIN_VALUE),
       MAX_LONG_BI = BigInteger.valueOf(Long.MAX_VALUE);
 
   protected RexNode convert(ExprNodeConstantDesc literal) throws OptiqSemanticException {
-    RexBuilder rexBuilder = m_cluster.getRexBuilder();
+    RexBuilder rexBuilder = cluster.getRexBuilder();
     RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
     PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
     RelDataType optiqDataType = TypeConverter.convert(hiveType, dtFactory);
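
For context, a sketch of the offset arithmetic in convert(ExprNodeColumnDesc)
above (hypothetical two-input case; rexBuilder and rightRowType are assumed):

    // If the left child exposes 3 fields, the right child's InputCtx is
    // registered with offsetInOptiqSchema = 3, so the column at position 1
    // of the right child's own schema becomes input ref $4 in the join
    // schema:
    RexNode ref = rexBuilder.makeInputRef(
        rightRowType.getFieldList().get(1).getType(), 1 + 3);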
@@ -314,7 +314,7 @@ public class RexNodeConverter {
       optiqLiteral = rexBuilder.makeBinaryLiteral(bs);
       break;
     case SHORT:
-      optiqLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value));
+      optiqLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), optiqDataType);
       break;
     case INT:
       optiqLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
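
For illustration, a sketch of why the SHORT case now passes the converted type
explicitly (assuming the one-argument makeExactLiteral infers the literal's
type from the value, which widens a Hive SMALLINT constant to INTEGER):

    // Type inferred from the value: the literal comes back as INTEGER.
    RexLiteral widened = rexBuilder.makeExactLiteral(new BigDecimal((short) 7));
    // Type pinned to the converted Hive type (optiqDataType above), so the
    // literal stays consistent with the SMALLINT column it is compared to.
    RexLiteral pinned =
        rexBuilder.makeExactLiteral(new BigDecimal((short) 7), optiqDataType);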
@@ -353,7 +353,7 @@ public class RexNodeConverter {
         // will work...
         // An alternative would be to throw CboSemanticException and fall back
         // to no CBO.
-        RelDataType relType = m_cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL,
+        RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL,
             bd.scale(), unscaled.toString().length());
         optiqLiteral = rexBuilder.makeExactLiteral(bd, relType);
       }
@@ -397,8 +397,8 @@ public class RexNodeConverter {
   }
 
   private RexNode createNullLiteral(ExprNodeDesc expr) throws OptiqSemanticException {
-    return m_cluster.getRexBuilder().makeNullLiteral(
-        TypeConverter.convert(expr.getTypeInfo(), m_cluster.getTypeFactory()).getSqlTypeName());
+    return cluster.getRexBuilder().makeNullLiteral(
+        TypeConverter.convert(expr.getTypeInfo(), cluster.getTypeFactory()).getSqlTypeName());
   }
 
   public static RexNode convert(RelOptCluster cluster, ExprNodeDesc joinCondnExprNode,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java Thu Oct 30 16:22:33 2014
@@ -64,7 +64,7 @@ import com.google.common.collect.Immutab
 import com.google.common.collect.Maps;
 
 public class SqlFunctionConverter {
-  private static final Log LOG = LogFactory.getLog(SqlFunctionConverter.class);
+  private static final Log                 LOG = LogFactory.getLog(SqlFunctionConverter.class);
 
   static final Map<String, SqlOperator>    hiveToOptiq;
   static final Map<SqlOperator, HiveToken> optiqToHiveToken;
@@ -87,10 +87,12 @@ public class SqlFunctionConverter {
     } // do generic lookup
     String name = null;
     if (StringUtils.isEmpty(funcTextName)) {
-      name = getName(hiveUDF); // this should probably never happen, see getName comment
+      name = getName(hiveUDF); // this should probably never happen, see getName
+                               // comment
       LOG.warn("The function text was empty, name from annotation is " + name);
     } else {
-      // We could just do toLowerCase here and let SA qualify it, but let's be proper...
+      // We could just do toLowerCase here and let SA qualify it, but let's be
+      // proper...
       name = FunctionRegistry.getNormalizedFunctionName(funcTextName);
     }
     return getOptiqFn(name, optiqArgTypes, retType);
@@ -115,11 +117,10 @@ public class SqlFunctionConverter {
       if (castType.equals(TypeInfoFactory.byteTypeInfo)) {
         castUDF = FunctionRegistry.getFunctionInfo("tinyint");
       } else if (castType instanceof CharTypeInfo) {
-        castUDF = handleCastForParameterizedType(castType,
-          FunctionRegistry.getFunctionInfo("char"));
+        castUDF = handleCastForParameterizedType(castType, FunctionRegistry.getFunctionInfo("char"));
       } else if (castType instanceof VarcharTypeInfo) {
         castUDF = handleCastForParameterizedType(castType,
-          FunctionRegistry.getFunctionInfo("varchar"));
+            FunctionRegistry.getFunctionInfo("varchar"));
       } else if (castType.equals(TypeInfoFactory.stringTypeInfo)) {
         castUDF = FunctionRegistry.getFunctionInfo("string");
       } else if (castType.equals(TypeInfoFactory.booleanTypeInfo)) {
@@ -140,24 +141,24 @@ public class SqlFunctionConverter {
         castUDF = FunctionRegistry.getFunctionInfo("datetime");
       } else if (castType instanceof DecimalTypeInfo) {
         castUDF = handleCastForParameterizedType(castType,
-          FunctionRegistry.getFunctionInfo("decimal"));
+            FunctionRegistry.getFunctionInfo("decimal"));
       } else if (castType.equals(TypeInfoFactory.binaryTypeInfo)) {
         castUDF = FunctionRegistry.getFunctionInfo("binary");
-      } else throw new IllegalStateException("Unexpected type : " +
-        castType.getQualifiedName());
+      } else
+        throw new IllegalStateException("Unexpected type : " + castType.getQualifiedName());
     }
 
     return castUDF;
   }
 
   private static FunctionInfo handleCastForParameterizedType(TypeInfo ti, FunctionInfo fi) {
-    SettableUDF udf = (SettableUDF)fi.getGenericUDF();
+    SettableUDF udf = (SettableUDF) fi.getGenericUDF();
     try {
       udf.setTypeInfo(ti);
     } catch (UDFArgumentException e) {
       throw new RuntimeException(e);
     }
-    return new FunctionInfo(fi.isNative(),fi.getDisplayName(),(GenericUDF)udf);
+    return new FunctionInfo(fi.isNative(), fi.getDisplayName(), (GenericUDF) udf);
   }
 
   // TODO: 1) handle Agg Func Name translation 2) is it correct to add func args
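
For context, a sketch of how a parameterized cast resolves through
handleCastForParameterizedType above (hypothetical CHAR(10) target;
TypeInfoFactory.getCharTypeInfo is Hive's standard factory method):

    // Resolving CAST(x AS CHAR(10)): the generic "char" UDF implements
    // SettableUDF, so the concrete length is injected before the converter
    // uses the returned FunctionInfo.
    CharTypeInfo char10 = TypeInfoFactory.getCharTypeInfo(10);
    FunctionInfo charFn = handleCastForParameterizedType(char10,
        FunctionRegistry.getFunctionInfo("char"));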
@@ -175,11 +176,10 @@ public class SqlFunctionConverter {
         } else if (op.kind == SqlKind.PLUS_PREFIX) {
           node = (ASTNode) ParseDriver.adaptor.create(HiveParser.PLUS, "PLUS");
         } else {
-          if (op.getName().toUpperCase()
-              .equals(SqlStdOperatorTable.COUNT.getName())
+          if (op.getName().toUpperCase().equals(SqlStdOperatorTable.COUNT.getName())
               && children.size() == 0) {
-            node = (ASTNode) ParseDriver.adaptor.create(
-                HiveParser.TOK_FUNCTIONSTAR, "TOK_FUNCTIONSTAR");
+            node = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_FUNCTIONSTAR,
+                "TOK_FUNCTIONSTAR");
           }
           node.addChild((ASTNode) ParseDriver.adaptor.create(HiveParser.Identifier, op.getName()));
         }
@@ -210,9 +210,12 @@ public class SqlFunctionConverter {
 
   }
 
-  // TODO: this is not valid. Function names for built-in UDFs are specified in FunctionRegistry,
-  //       and only happen to match annotations. For user UDFs, the name is what user specifies at
-  //       creation time (annotation can be absent, different, or duplicate some other function).
+  // TODO: this is not valid. Function names for built-in UDFs are specified in
+  // FunctionRegistry,
+  // and only happen to match annotations. For user UDFs, the name is what the
+  // user specifies at
+  // creation time (annotation can be absent, different, or duplicate some other
+  // function).
   private static String getName(GenericUDF hiveUDF) {
     String udfName = null;
     if (hiveUDF instanceof GenericUDFBridge) {
@@ -287,70 +290,72 @@ public class SqlFunctionConverter {
   }
 
   public static class OptiqUDAF extends SqlAggFunction {
-    final ImmutableList<RelDataType> m_argTypes;
-    final RelDataType                m_retType;
+    final ImmutableList<RelDataType> argTypes;
+    final RelDataType                retType;
 
     public OptiqUDAF(String opName, SqlReturnTypeInference returnTypeInference,
         SqlOperandTypeInference operandTypeInference, SqlOperandTypeChecker operandTypeChecker,
         ImmutableList<RelDataType> argTypes, RelDataType retType) {
       super(opName, SqlKind.OTHER_FUNCTION, returnTypeInference, operandTypeInference,
           operandTypeChecker, SqlFunctionCategory.USER_DEFINED_FUNCTION);
-      m_argTypes = argTypes;
-      m_retType = retType;
+      this.argTypes = argTypes;
+      this.retType = retType;
     }
 
     @Override
     public List<RelDataType> getParameterTypes(final RelDataTypeFactory typeFactory) {
-      return m_argTypes;
+      return this.argTypes;
     }
 
     @Override
     public RelDataType getReturnType(final RelDataTypeFactory typeFactory) {
-      return m_retType;
+      return this.retType;
     }
   }
 
   private static class OptiqUDFInfo {
-    private String                     m_udfName;
-    private SqlReturnTypeInference     m_returnTypeInference;
-    private SqlOperandTypeInference    m_operandTypeInference;
-    private SqlOperandTypeChecker      m_operandTypeChecker;
-    private ImmutableList<RelDataType> m_argTypes;
-    private RelDataType                m_retType;
+    private String                     udfName;
+    private SqlReturnTypeInference     returnTypeInference;
+    private SqlOperandTypeInference    operandTypeInference;
+    private SqlOperandTypeChecker      operandTypeChecker;
+    private ImmutableList<RelDataType> argTypes;
+    private RelDataType                retType;
   }
 
   private static OptiqUDFInfo getUDFInfo(String hiveUdfName,
       ImmutableList<RelDataType> optiqArgTypes, RelDataType optiqRetType) {
     OptiqUDFInfo udfInfo = new OptiqUDFInfo();
-    udfInfo.m_udfName = hiveUdfName;
-    udfInfo.m_returnTypeInference = ReturnTypes.explicit(optiqRetType);
-    udfInfo.m_operandTypeInference = InferTypes.explicit(optiqArgTypes);
+    udfInfo.udfName = hiveUdfName;
+    udfInfo.returnTypeInference = ReturnTypes.explicit(optiqRetType);
+    udfInfo.operandTypeInference = InferTypes.explicit(optiqArgTypes);
     ImmutableList.Builder<SqlTypeFamily> typeFamilyBuilder = new ImmutableList.Builder<SqlTypeFamily>();
     for (RelDataType at : optiqArgTypes) {
       typeFamilyBuilder.add(Util.first(at.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
     }
-    udfInfo.m_operandTypeChecker = OperandTypes.family(typeFamilyBuilder.build());
+    udfInfo.operandTypeChecker = OperandTypes.family(typeFamilyBuilder.build());
 
-    udfInfo.m_argTypes = ImmutableList.<RelDataType> copyOf(optiqArgTypes);
-    udfInfo.m_retType = optiqRetType;
+    udfInfo.argTypes = ImmutableList.<RelDataType> copyOf(optiqArgTypes);
+    udfInfo.retType = optiqRetType;
 
     return udfInfo;
   }
 
   public static SqlOperator getOptiqFn(String hiveUdfName,
-      ImmutableList<RelDataType> optiqArgTypes, RelDataType optiqRetType) throws OptiqSemanticException{
+      ImmutableList<RelDataType> optiqArgTypes, RelDataType optiqRetType)
+      throws OptiqSemanticException {
 
     if (hiveUdfName != null && hiveUdfName.trim().equals("<=>")) {
       // We can create Optiq IS_DISTINCT_FROM operator for this. But since our
-      // join reordering algo cant handle this anyway there is no advantage of this.
+      // join reordering algo can't handle this anyway, there is no advantage
+      // to this.
       // So, bail out for now.
       throw new OptiqSemanticException("<=> is not yet supported for cbo.");
     }
     SqlOperator optiqOp = hiveToOptiq.get(hiveUdfName);
     if (optiqOp == null) {
       OptiqUDFInfo uInf = getUDFInfo(hiveUdfName, optiqArgTypes, optiqRetType);
-      optiqOp = new SqlFunction(uInf.m_udfName, SqlKind.OTHER_FUNCTION, uInf.m_returnTypeInference,
-          uInf.m_operandTypeInference, uInf.m_operandTypeChecker,
+      optiqOp = new SqlFunction(uInf.udfName, SqlKind.OTHER_FUNCTION, uInf.returnTypeInference,
+          uInf.operandTypeInference, uInf.operandTypeChecker,
           SqlFunctionCategory.USER_DEFINED_FUNCTION);
     }
 
@@ -363,8 +368,8 @@ public class SqlFunctionConverter {
     if (optiqAggFn == null) {
       OptiqUDFInfo uInf = getUDFInfo(hiveUdfName, optiqArgTypes, optiqRetType);
 
-      optiqAggFn = new OptiqUDAF(uInf.m_udfName, uInf.m_returnTypeInference,
-          uInf.m_operandTypeInference, uInf.m_operandTypeChecker, uInf.m_argTypes, uInf.m_retType);
+      optiqAggFn = new OptiqUDAF(uInf.udfName, uInf.returnTypeInference, uInf.operandTypeInference,
+          uInf.operandTypeChecker, uInf.argTypes, uInf.retType);
     }
 
     return optiqAggFn;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java Thu Oct 30 16:22:33 2014
@@ -31,10 +31,13 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
@@ -133,7 +136,15 @@ public final class PcrOpProcFactory {
 
       if (wrapper.state == PcrExprProcFactory.WalkState.TRUE) {
         owc.getOpToRemove().add(new PcrOpWalkerCtx.OpToDeleteInfo(pop, fop));
-      } else if (wrapper.state != PcrExprProcFactory.WalkState.FALSE) {
+      } else if (wrapper.state == PcrExprProcFactory.WalkState.CONSTANT && wrapper.outExpr instanceof ExprNodeGenericFuncDesc) {
+        ExprNodeDesc desc = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) wrapper.outExpr);
+        if (desc instanceof ExprNodeConstantDesc
+            && Boolean.TRUE.equals(((ExprNodeConstantDesc) desc).getValue())) {
+          owc.getOpToRemove().add(new PcrOpWalkerCtx.OpToDeleteInfo(pop, fop));
+        } else {
+          fop.getConf().setPredicate(wrapper.outExpr);
+        }
+      } else if (wrapper.state != PcrExprProcFactory.WalkState.FALSE) {
         fop.getConf().setPredicate(wrapper.outExpr);
       } else {
         LOG.warn("Filter passes no row");