Posted to commits@spark.apache.org by gu...@apache.org on 2022/07/25 06:56:48 UTC

[spark] branch branch-3.0 updated: Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new ea05c33e634 Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"
ea05c33e634 is described below

commit ea05c33e6349201ee192168a7e385186cd06b2b4
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Mon Jul 25 15:56:34 2022 +0900

    Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"
    
    This reverts commit 0a27d0c6e8e705176f0f245794bc8361860ac680.
---
 .../test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala    | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
index 7fb4b567b1a..c16bcd9fd05 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
@@ -58,7 +58,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
 
   // To make output results deterministic
   protected override def sparkConf: SparkConf = super.sparkConf
-    .set(SQLConf.SHUFFLE_PARTITIONS.key, 4.toString)
+    .set(SQLConf.SHUFFLE_PARTITIONS.key, "1")
 
   protected override def createSparkSession: TestSparkSession = {
     new TestSparkSession(new SparkContext("local[1]", this.getClass.getSimpleName, sparkConf))
@@ -103,9 +103,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
   private def runQuery(query: String, goldenFile: File): Unit = {
     val (schema, output) = handleExceptions(getNormalizedResult(spark, query))
     val queryString = query.trim
-    val outputString = output.mkString("\n")
-      .replaceAll("\\s+$", "")
-      .replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1")
+    val outputString = output.mkString("\n").replaceAll("\\s+$", "")
     if (regenerateGoldenFiles) {
       val goldenOutput = {
         s"-- Automatically generated by ${getClass.getSimpleName}\n\n" +
@@ -132,8 +130,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
         s"Expected 3 blocks in result file but got ${segments.size}. " +
           "Try regenerate the result files.")
 
-      (segments(1).trim, segments(2)
-        .replaceAll("\\s+$", "").replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1"))
+      (segments(1).trim, segments(2).replaceAll("\\s+$", ""))
     }
 
     assertResult(expectedSchema, s"Schema did not match\n$queryString") { schema }

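Note on the reverted replaceAll (not part of the commit, illustrative names): the pattern ([0-9]+.[0-9]{10})([0-9]*) trims query output to ten digits after the decimal point before it is compared against the golden files, so floating-point differences beyond that precision do not fail the test. A minimal standalone Scala sketch of that behavior:

    // Sketch only: shows what the removed replaceAll did to result strings.
    object DecimalTruncationSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical query output: one value with more than 10 fractional digits.
        val raw = "0.1234567890123456\t42\n3.14"
        // Same pattern as in the diff; it keeps the first 10 digits after the
        // decimal point and drops the rest. Shorter numbers are left unchanged.
        val truncated = raw.replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1")
        println(truncated == "0.1234567890\t42\n3.14") // prints: true
      }
    }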

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org