Posted to commits@spark.apache.org by gu...@apache.org on 2022/07/25 06:56:23 UTC

[spark] branch branch-3.2 updated: Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 0d11085e682 Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"
0d11085e682 is described below

commit 0d11085e682eefb19cb4e6882b217a1225c27dbc
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Mon Jul 25 15:55:59 2022 +0900

    Revert "[SPARK-39856][SQL][TESTS] Increase the number of partitions in TPC-DS build to avoid out-of-memory"
    
    This reverts commit e7aa9671276c435cca851fc53931db302b64bbac.
---
 .../test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala    | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
index 4b1a2626ef4..952e8968020 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
@@ -58,7 +58,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
 
   // To make output results deterministic
   protected override def sparkConf: SparkConf = super.sparkConf
-    .set(SQLConf.SHUFFLE_PARTITIONS.key, 4.toString)
+    .set(SQLConf.SHUFFLE_PARTITIONS.key, "1")
 
   protected override def createSparkSession: TestSparkSession = {
     new TestSparkSession(new SparkContext("local[1]", this.getClass.getSimpleName, sparkConf))
@@ -100,9 +100,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
   private def runQuery(query: String, goldenFile: File): Unit = {
     val (schema, output) = handleExceptions(getNormalizedResult(spark, query))
     val queryString = query.trim
-    val outputString = output.mkString("\n")
-      .replaceAll("\\s+$", "")
-      .replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1")
+    val outputString = output.mkString("\n").replaceAll("\\s+$", "")
     if (regenerateGoldenFiles) {
       val goldenOutput = {
         s"-- Automatically generated by ${getClass.getSimpleName}\n\n" +
@@ -129,8 +127,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
         s"Expected 3 blocks in result file but got ${segments.size}. " +
           "Try regenerate the result files.")
 
-      (segments(1).trim, segments(2)
-        .replaceAll("\\s+$", "").replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1"))
+      (segments(1).trim, segments(2).replaceAll("\\s+$", ""))
     }
 
     assertResult(expectedSchema, s"Schema did not match\n$queryString") { schema }
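
For context (not part of the commit): the replaceAll call that this revert removes was normalizing query output by keeping at most 10 fractional digits per numeric value before comparison against the golden files. A minimal standalone Scala sketch of that normalization, using an assumed sample value:

    // Illustration only; mirrors the regex deleted by this revert.
    val raw = "3.14159265358979"  // assumed sample value, not from the test suite
    val normalized = raw.replaceAll("""([0-9]+.[0-9]{10})([0-9]*)""", "$1")
    // normalized == "3.1415926535" -- only the first 10 fractional digits are kept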


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org