You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by hv...@apache.org on 2018/09/13 12:21:05 UTC

spark git commit: [SPARK-25352][SQL][FOLLOWUP] Add helper method and address style issue

Repository: spark
Updated Branches:
  refs/heads/master 3e75a9fa2 -> 5b761c537


[SPARK-25352][SQL][FOLLOWUP] Add helper method and address style issue

## What changes were proposed in this pull request?

This follow-up patch addresses [the review comment](https://github.com/apache/spark/pull/22344/files#r217070658) by adding a helper method to simplify the code and fixing a style issue.

## How was this patch tested?

Existing unit tests.

Author: Liang-Chi Hsieh <vi...@gmail.com>

Closes #22409 from viirya/SPARK-25352-followup.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5b761c53
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5b761c53
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5b761c53

Branch: refs/heads/master
Commit: 5b761c537a600115450b53817bee0679d5c2bb97
Parents: 3e75a9f
Author: Liang-Chi Hsieh <vi...@gmail.com>
Authored: Thu Sep 13 14:21:00 2018 +0200
Committer: Herman van Hovell <hv...@databricks.com>
Committed: Thu Sep 13 14:21:00 2018 +0200

----------------------------------------------------------------------
 .../spark/sql/execution/SparkStrategies.scala   | 55 ++++++++------------
 1 file changed, 23 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5b761c53/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 7c8ce31..89442a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -66,44 +66,35 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
    * Plans special cases of limit operators.
    */
   object SpecialLimits extends Strategy {
-    override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
-      case ReturnAnswer(rootPlan) => rootPlan match {
-        case Limit(IntegerLiteral(limit), s@Sort(order, true, child)) =>
-          if (limit < conf.topKSortFallbackThreshold) {
+    private def decideTopRankNode(limit: Int, child: LogicalPlan): Seq[SparkPlan] = {
+      if (limit < conf.topKSortFallbackThreshold) {
+        child match {
+          case Sort(order, true, child) =>
             TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
-          } else {
-            GlobalLimitExec(limit,
-              LocalLimitExec(limit, planLater(s)),
-              orderedLimit = true) :: Nil
-          }
-        case Limit(IntegerLiteral(limit), p@Project(projectList, Sort(order, true, child))) =>
-          if (limit < conf.topKSortFallbackThreshold) {
+          case Project(projectList, Sort(order, true, child)) =>
             TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
-          } else {
-            GlobalLimitExec(limit,
-              LocalLimitExec(limit, planLater(p)),
-              orderedLimit = true) :: Nil
-          }
+        }
+      } else {
+        GlobalLimitExec(limit,
+          LocalLimitExec(limit, planLater(child)),
+          orderedLimit = true) :: Nil
+      }
+    }
+
+    override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
+      case ReturnAnswer(rootPlan) => rootPlan match {
+        case Limit(IntegerLiteral(limit), s @ Sort(order, true, child)) =>
+          decideTopRankNode(limit, s)
+        case Limit(IntegerLiteral(limit), p @ Project(projectList, Sort(order, true, child))) =>
+          decideTopRankNode(limit, p)
         case Limit(IntegerLiteral(limit), child) =>
           CollectLimitExec(limit, planLater(child)) :: Nil
         case other => planLater(other) :: Nil
       }
-      case Limit(IntegerLiteral(limit), s@Sort(order, true, child)) =>
-        if (limit < conf.topKSortFallbackThreshold) {
-          TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
-        } else {
-          GlobalLimitExec(limit,
-            LocalLimitExec(limit, planLater(s)),
-            orderedLimit = true) :: Nil
-        }
-      case Limit(IntegerLiteral(limit), p@Project(projectList, Sort(order, true, child))) =>
-        if (limit < conf.topKSortFallbackThreshold) {
-          TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
-        } else {
-          GlobalLimitExec(limit,
-            LocalLimitExec(limit, planLater(p)),
-            orderedLimit = true) :: Nil
-        }
+      case Limit(IntegerLiteral(limit), s @ Sort(order, true, child)) =>
+        decideTopRankNode(limit, s)
+      case Limit(IntegerLiteral(limit), p @ Project(projectList, Sort(order, true, child))) =>
+        decideTopRankNode(limit, p)
       case _ => Nil
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org