You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2020/04/12 20:12:31 UTC
[spark] branch branch-3.0 updated: [SPARK-31424][SQL] Rename
AdaptiveSparkPlanHelper.collectInPlanAndSubqueries to collectWithSubqueries
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 36b0767 [SPARK-31424][SQL] Rename AdaptiveSparkPlanHelper.collectInPlanAndSubqueries to collectWithSubqueries
36b0767 is described below
commit 36b0767a845a153c9972c6c863b197f3d2ecf33b
Author: gatorsmile <ga...@gmail.com>
AuthorDate: Sun Apr 12 13:10:57 2020 -0700
[SPARK-31424][SQL] Rename AdaptiveSparkPlanHelper.collectInPlanAndSubqueries to collectWithSubqueries
### What changes were proposed in this pull request?
Like https://github.com/apache/spark/pull/28092 (which renamed `QueryPlan.collectInPlanAndSubqueries`), this PR is to rename `AdaptiveSparkPlanHelper.collectInPlanAndSubqueries` to `collectWithSubqueries`
### Why are the changes needed?
The old name is too verbose. `AdaptiveSparkPlanHelper` is internal, but it is a core helper API and we'd better make the API name clearer before we release it.
### Does this PR introduce any user-facing change?
no
### How was this patch tested?
N/A
Closes #28193 from gatorsmile/spark-31322.
Authored-by: gatorsmile <ga...@gmail.com>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
(cherry picked from commit ad79ae11ba1cf30cd496f5edc4c8a9a109fd4a0e)
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
.../apache/spark/sql/execution/adaptive/AdaptiveSparkPlanHelper.scala | 2 +-
.../apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanHelper.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanHelper.scala
index 61ae6cb..cd87230 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanHelper.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanHelper.scala
@@ -109,7 +109,7 @@ trait AdaptiveSparkPlanHelper {
* Returns a sequence containing the result of applying a partial function to all elements in this
* plan, also considering all the plans in its (nested) subqueries
*/
- def collectInPlanAndSubqueries[B](p: SparkPlan)(f: PartialFunction[SparkPlan, B]): Seq[B] = {
+ def collectWithSubqueries[B](p: SparkPlan)(f: PartialFunction[SparkPlan, B]): Seq[B] = {
(p +: subqueriesAll(p)).flatMap(collect(_)(f))
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
index bfde042..b8ac4ddc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
@@ -95,14 +95,14 @@ class AdaptiveQueryExecSuite
}
private def findReusedExchange(plan: SparkPlan): Seq[ReusedExchangeExec] = {
- collectInPlanAndSubqueries(plan) {
+ collectWithSubqueries(plan) {
case ShuffleQueryStageExec(_, e: ReusedExchangeExec) => e
case BroadcastQueryStageExec(_, e: ReusedExchangeExec) => e
}
}
private def findReusedSubquery(plan: SparkPlan): Seq[ReusedSubqueryExec] = {
- collectInPlanAndSubqueries(plan) {
+ collectWithSubqueries(plan) {
case e: ReusedSubqueryExec => e
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org