You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2017/08/31 21:15:36 UTC
spark git commit: [SPARK-17107][SQL][FOLLOW-UP] Remove redundant
pushdown rule for Union
Repository: spark
Updated Branches:
refs/heads/master 501370d9d -> 7ce110828
[SPARK-17107][SQL][FOLLOW-UP] Remove redundant pushdown rule for Union
## What changes were proposed in this pull request?
Also remove the now-unused function `partitionByDeterministic`, which became redundant after the changes in https://github.com/apache/spark/pull/14687
## How was this patch tested?
N/A
Author: gatorsmile <ga...@gmail.com>
Closes #19097 from gatorsmile/followupSPARK-17107.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7ce11082
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7ce11082
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7ce11082
Branch: refs/heads/master
Commit: 7ce110828608551f22f6cd2abdbd964844b45975
Parents: 501370d
Author: gatorsmile <ga...@gmail.com>
Authored: Thu Aug 31 14:15:34 2017 -0700
Committer: gatorsmile <ga...@gmail.com>
Committed: Thu Aug 31 14:15:34 2017 -0700
----------------------------------------------------------------------
.../spark/sql/catalyst/optimizer/Optimizer.scala | 15 ---------------
1 file changed, 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/7ce11082/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 75d83bc..b73f70a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -380,21 +380,6 @@ object PushProjectionThroughUnion extends Rule[LogicalPlan] with PredicateHelper
result.asInstanceOf[A]
}
- /**
- * Splits the condition expression into small conditions by `And`, and partition them by
- * deterministic, and finally recombine them by `And`. It returns an expression containing
- * all deterministic expressions (the first field of the returned Tuple2) and an expression
- * containing all non-deterministic expressions (the second field of the returned Tuple2).
- */
- private def partitionByDeterministic(condition: Expression): (Expression, Expression) = {
- val andConditions = splitConjunctivePredicates(condition)
- andConditions.partition(_.deterministic) match {
- case (deterministic, nondeterministic) =>
- deterministic.reduceOption(And).getOrElse(Literal(true)) ->
- nondeterministic.reduceOption(And).getOrElse(Literal(true))
- }
- }
-
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
// Push down deterministic projection through UNION ALL
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org