Posted to commits@spark.apache.org by sa...@apache.org on 2020/07/31 01:40:16 UTC

[spark] branch branch-3.0 updated: [SPARK-32175][FOLLOWUP] Remove flaky test added in SPARK-32175

This is an automated email from the ASF dual-hosted git repository.

sarutak pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 2a38090  [SPARK-32175][FOLLOWUP] Remove flaky test added in SPARK-32175
2a38090 is described below

commit 2a3809058f668195ae4f377a293b228d62d970bb
Author: Kousuke Saruta <sa...@oss.nttdata.com>
AuthorDate: Fri Jul 31 10:37:05 2020 +0900

    [SPARK-32175][FOLLOWUP] Remove flaky test added in SPARK-32175
    
    ### What changes were proposed in this pull request?
    
    This PR removes a test added in SPARK-32175 (#29002).
    
    ### Why are the changes needed?
    
    That test is flaky. The flakiness could be mitigated by increasing the timeout, but it is simpler to remove the test altogether.
    See also the [discussion](https://github.com/apache/spark/pull/29002#issuecomment-666746857).
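    
    For context only (an illustration, not something this PR adds): the fragility comes from racing a fixed `Thread.sleep` in the plugin's `init()` against real-clock heartbeat and timeout settings. In principle such a test could be made deterministic by replacing the sleep with explicit synchronization; the sketch below uses hypothetical names and is not part of the Spark test suite.
    
    ```scala
    import java.util.concurrent.{CountDownLatch, TimeUnit}
    
    // Hypothetical sketch: replace a fixed sleep with a latch the test can
    // wait on, removing the race against heartbeat/timeout settings.
    object PluginInitSync {
      // A plugin's init() would count this down instead of sleeping.
      val initStarted = new CountDownLatch(1)
    
      def main(args: Array[String]): Unit = {
        // Simulates the plugin thread signalling that init() has begun.
        new Thread(() => initStarted.countDown()).start()
        // Test side: a bounded wait on an explicit signal, no fixed sleeps.
        assert(initStarted.await(30, TimeUnit.SECONDS), "plugin init never started")
      }
    }
    ```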
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    Closes #29314 from sarutak/remove-flaky-test.
    
    Authored-by: Kousuke Saruta <sa...@oss.nttdata.com>
    Signed-off-by: Kousuke Saruta <sa...@oss.nttdata.com>
    (cherry picked from commit 9d7b1d935f7a2b770d8b2f264cfe4a4db2ad64b6)
    Signed-off-by: Kousuke Saruta <sa...@oss.nttdata.com>
---
 .../org/apache/spark/executor/ExecutorSuite.scala  | 67 ----------------------
 1 file changed, 67 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
index b198448..8e58bef 100644
--- a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
@@ -403,73 +403,6 @@ class ExecutorSuite extends SparkFunSuite
     assert(taskMetrics.getMetricValue("JVMHeapMemory") > 0)
   }
 
-  test("SPARK-32175: Plugin initialization should start after heartbeater started") {
-    withTempDir { tempDir =>
-      val sparkPluginCodeBody =
-        """
-          |@Override
-          |public org.apache.spark.api.plugin.ExecutorPlugin executorPlugin() {
-          |  return new TestExecutorPlugin();
-          |}
-          |
-          |@Override
-          |public org.apache.spark.api.plugin.DriverPlugin driverPlugin() { return null; }
-        """.stripMargin
-      val executorPluginBody =
-        """
-          |@Override
-          |public void init(
-          |    org.apache.spark.api.plugin.PluginContext ctx,
-          |    java.util.Map<String, String> extraConf) {
-          |  try {
-          |    Thread.sleep(8 * 1000);
-          |  } catch (InterruptedException e) {
-          |    throw new RuntimeException(e);
-          |  }
-          |}
-        """.stripMargin
-
-      val compiledExecutorPlugin = TestUtils.createCompiledClass(
-        "TestExecutorPlugin",
-        tempDir,
-        "",
-        null,
-        Seq.empty,
-        Seq("org.apache.spark.api.plugin.ExecutorPlugin"),
-        executorPluginBody)
-
-      val thisClassPath =
-        sys.props("java.class.path").split(File.pathSeparator).map(p => new File(p).toURI.toURL)
-      val compiledSparkPlugin = TestUtils.createCompiledClass(
-        "TestSparkPlugin",
-        tempDir,
-        "",
-        null,
-        Seq(tempDir.toURI.toURL) ++ thisClassPath,
-        Seq("org.apache.spark.api.plugin.SparkPlugin"),
-        sparkPluginCodeBody)
-
-      val jarUrl = TestUtils.createJar(
-        Seq(compiledSparkPlugin, compiledExecutorPlugin),
-        new File(tempDir, "testPlugin.jar"))
-
-      val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
-      val args = Seq(
-        "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
-        "--name", "testApp",
-        "--master", "local-cluster[1,1,1024]",
-        "--conf", "spark.plugins=TestSparkPlugin",
-        "--conf", "spark.storage.blockManagerSlaveTimeoutMs=" + 5 * 1000,
-        "--conf", "spark.network.timeoutInterval=" + 1000,
-        "--conf", "spark.executor.heartbeatInterval=" + 1000,
-        "--conf", "spark.executor.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.driver.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.ui.enabled=false",
-        unusedJar.toString)
-      SparkSubmitSuite.runSparkSubmit(args, timeout = 30.seconds)
-    }
-  }
-
   private def createMockEnv(conf: SparkConf, serializer: JavaSerializer): SparkEnv = {
     val mockEnv = mock[SparkEnv]
     val mockRpcEnv = mock[RpcEnv]

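The removed test exercised the invariant fixed in SPARK-32175: the executor's heartbeater must already be running before plugin initialization begins. Its plugin slept 8 seconds in `init()` while `spark.storage.blockManagerSlaveTimeoutMs=5000` and `spark.executor.heartbeatInterval=1000`, so the submitted app could only succeed if heartbeats were flowing during the slow init. A minimal sketch of that startup ordering, with hypothetical names (the real `Executor` wiring differs):

```scala
// Hypothetical sketch of the ordering under test; not Spark's actual code.
class ExecutorSketch {
  private def startHeartbeater(): Unit = {
    // Schedule periodic heartbeats to the driver here.
  }

  private def initPlugins(): Unit = {
    // ExecutorPlugin.init() runs here and may block for a long time.
  }

  // SPARK-32175: start heartbeating first, so a slow plugin init() cannot
  // cause the driver to time the executor out and mark it dead.
  startHeartbeater()
  initPlugins()
}
```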
