You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2022/07/27 00:27:36 UTC
[spark] branch branch-3.1 updated: [SPARK-39879][SQL][TESTS] Reduce local-cluster maximum memory size in `BroadcastJoinSuite*` and `HiveSparkSubmitSuite`
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new 3768ee1e775 [SPARK-39879][SQL][TESTS] Reduce local-cluster maximum memory size in `BroadcastJoinSuite*` and `HiveSparkSubmitSuite`
3768ee1e775 is described below
commit 3768ee1e775d42920bb2583f5fcb5f15927688ad
Author: yangjie01 <ya...@baidu.com>
AuthorDate: Wed Jul 27 09:26:47 2022 +0900
[SPARK-39879][SQL][TESTS] Reduce local-cluster maximum memory size in `BroadcastJoinSuite*` and `HiveSparkSubmitSuite`
### What changes were proposed in this pull request?
This PR changes `local-cluster[2, 1, 1024]` in `BroadcastJoinSuite*` and `HiveSparkSubmitSuite` to `local-cluster[2, 1, 512]` to reduce test maximum memory usage.
### Why are the changes needed?
Reduce the maximum memory usage of test cases.
### Does this PR introduce _any_ user-facing change?
No, test-only.
### How was this patch tested?
Should be verified by monitoring CI.
Closes #37298 from LuciferYang/reduce-local-cluster-memory.
Authored-by: yangjie01 <ya...@baidu.com>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
(cherry picked from commit 01d41e7de418d0a40db7b16ddd0d8546f0794d17)
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../sql/execution/joins/BroadcastJoinSuite.scala | 4 +-
.../spark/sql/hive/HiveSparkSubmitSuite.scala | 43 +++++++++++++++-------
2 files changed, 32 insertions(+), 15 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/BroadcastJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/BroadcastJoinSuite.scala
index 98a1089709b..6883c8d1411 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/BroadcastJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/BroadcastJoinSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.execution.joins
import scala.reflect.ClassTag
import org.apache.spark.AccumulatorSuite
+import org.apache.spark.internal.config.EXECUTOR_MEMORY
import org.apache.spark.sql.{Dataset, QueryTest, Row, SparkSession}
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, BitwiseAnd, BitwiseOr, Cast, Expression, Literal, ShiftLeft}
import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide}
@@ -54,7 +55,8 @@ abstract class BroadcastJoinSuiteBase extends QueryTest with SQLTestUtils
override def beforeAll(): Unit = {
super.beforeAll()
spark = SparkSession.builder()
- .master("local-cluster[2,1,1024]")
+ .master("local-cluster[2,1,512]")
+ .config(EXECUTOR_MEMORY.key, "512m")
.appName("testing")
.getOrCreate()
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 426d93b3506..862d4a71ca1 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -29,6 +29,7 @@ import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config.EXECUTOR_MEMORY
import org.apache.spark.internal.config.UI.UI_ENABLED
import org.apache.spark.sql.{QueryTest, Row, SparkSession}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
@@ -67,7 +68,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", TemporaryHiveUDFTest.getClass.getName.stripSuffix("$"),
"--name", "TemporaryHiveUDFTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -84,7 +86,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", PermanentHiveUDFTest1.getClass.getName.stripSuffix("$"),
"--name", "PermanentHiveUDFTest1",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -101,7 +104,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", PermanentHiveUDFTest2.getClass.getName.stripSuffix("$"),
"--name", "PermanentHiveUDFTest2",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -120,7 +124,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SparkSubmitClassLoaderTest.getClass.getName.stripSuffix("$"),
"--name", "SparkSubmitClassLoaderTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -135,7 +140,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SparkSQLConfTest.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--conf", "spark.sql.hive.metastore.version=0.12",
@@ -173,7 +179,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SPARK_9757.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -186,7 +193,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SPARK_11009.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -199,7 +207,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SPARK_14244.getClass.getName.stripSuffix("$"),
"--name", "SparkSQLConfTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -212,7 +221,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SetWarehouseLocationTest.getClass.getName.stripSuffix("$"),
"--name", "SetSparkWarehouseLocationTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -249,7 +259,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SetWarehouseLocationTest.getClass.getName.stripSuffix("$"),
"--name", "SetHiveWarehouseLocationTest",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--conf", s"spark.sql.test.expectedWarehouseDir=$hiveWarehouseLocation",
@@ -307,7 +318,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SPARK_18360.getClass.getName.stripSuffix("$"),
"--name", "SPARK-18360",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--driver-java-options", "-Dderby.system.durability=test",
@@ -321,7 +333,8 @@ class HiveSparkSubmitSuite
val argsForCreateTable = Seq(
"--class", SPARK_18989_CREATE_TABLE.getClass.getName.stripSuffix("$"),
"--name", "SPARK-18947",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--jars", HiveTestJars.getHiveContribJar().getCanonicalPath,
@@ -331,7 +344,8 @@ class HiveSparkSubmitSuite
val argsForShowTables = Seq(
"--class", SPARK_18989_DESC_TABLE.getClass.getName.stripSuffix("$"),
"--name", "SPARK-18947",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
unusedJar.toString)
@@ -352,7 +366,8 @@ class HiveSparkSubmitSuite
val args = Seq(
"--class", SPARK_34772.getClass.getName.stripSuffix("$"),
"--name", "SPARK-34772",
- "--master", "local-cluster[2,1,1024]",
+ "--master", "local-cluster[2,1,512]",
+ "--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", s"${LEGACY_TIME_PARSER_POLICY.key}=LEGACY",
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=1.2.1",
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org