You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2020/12/21 11:58:09 UTC

[spark] branch branch-3.1 updated: [SPARK-33850][SQL][FOLLOWUP] Improve and cleanup the test code

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new a1eea37  [SPARK-33850][SQL][FOLLOWUP] Improve and cleanup the test code
a1eea37 is described below

commit a1eea3700da8e1033b44972c1cb0865ac8c4626b
Author: Kousuke Saruta <sa...@oss.nttdata.com>
AuthorDate: Mon Dec 21 09:40:42 2020 +0900

    [SPARK-33850][SQL][FOLLOWUP] Improve and cleanup the test code
    
    ### What changes were proposed in this pull request?
    
    This PR mainly improves and cleans up the test code introduced in #30855 based on the comment.
    The test code is actually taken from another test `explain formatted - check presence of subquery in case of DPP`, so this PR cleans the code too (removed unnecessary `withTable`).
    
    ### Why are the changes needed?
    
    To keep the test code clean.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    `ExplainSuite` passes.
    
    Closes #30861 from sarutak/followup-SPARK-33850.
    
    Authored-by: Kousuke Saruta <sa...@oss.nttdata.com>
    Signed-off-by: Takeshi Yamamuro <ya...@apache.org>
    (cherry picked from commit 3c8be3983cd390306e9abbfe078536a08881a5d6)
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../scala/org/apache/spark/sql/ExplainSuite.scala  | 25 ++++++++--------------
 1 file changed, 9 insertions(+), 16 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
index 732d61b..fd1706a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
@@ -249,7 +249,6 @@ class ExplainSuite extends ExplainSuiteHelper with DisableAdaptiveExecutionSuite
       withSQLConf(SQLConf.DYNAMIC_PARTITION_PRUNING_ENABLED.key -> "true",
         SQLConf.DYNAMIC_PARTITION_PRUNING_REUSE_BROADCAST_ONLY.key -> "false",
         SQLConf.EXCHANGE_REUSE_ENABLED.key -> "false") {
-        withTable("df1", "df2") {
           spark.range(1000).select(col("id"), col("id").as("k"))
             .write
             .partitionBy("k")
@@ -289,27 +288,21 @@ class ExplainSuite extends ExplainSuiteHelper with DisableAdaptiveExecutionSuite
             assert(expected_pattern4.r.findAllMatchIn(normalizedOutput).length == 1)
           }
         }
-      }
     }
   }
 
   test("SPARK-33850: explain formatted - check presence of subquery in case of AQE") {
-    withTable("df1") {
-      withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
-        withTable("df1") {
-          spark.range(1, 100)
-            .write
-            .format("parquet")
-            .mode("overwrite")
-            .saveAsTable("df1")
+    withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
+      withTempView("df") {
+        val df = spark.range(1, 100)
+        df.createTempView("df")
 
-          val sqlText = "EXPLAIN FORMATTED SELECT (SELECT min(id) FROM df1) as v"
-          val expected_pattern1 =
-            "Subquery:1 Hosting operator id = 2 Hosting Expression = Subquery subquery#x"
+        val sqlText = "EXPLAIN FORMATTED SELECT (SELECT min(id) FROM df) as v"
+        val expected_pattern =
+          "Subquery:1 Hosting operator id = 2 Hosting Expression = Subquery subquery#x"
 
-          withNormalizedExplain(sqlText) { normalizedOutput =>
-            assert(expected_pattern1.r.findAllMatchIn(normalizedOutput).length == 1)
-          }
+        withNormalizedExplain(sqlText) { normalizedOutput =>
+          assert(expected_pattern.r.findAllMatchIn(normalizedOutput).length == 1)
         }
       }
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org