Posted to commits@spark.apache.org by mr...@apache.org on 2020/08/12 05:23:06 UTC

[spark] branch master updated: [SPARK-32596][CORE] Clear Ivy resolution files as part of finally block

This is an automated email from the ASF dual-hosted git repository.

mridulm80 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2d6eb00  [SPARK-32596][CORE] Clear Ivy resolution files as part of finally block
2d6eb00 is described below

commit 2d6eb00256d1ebc7e2cd82b13286dd9571a6b331
Author: Venkata krishnan Sowrirajan <vs...@linkedin.com>
AuthorDate: Wed Aug 12 00:22:22 2020 -0500

    [SPARK-32596][CORE] Clear Ivy resolution files as part of finally block
    
    
    ### What changes were proposed in this pull request?
    Clear Ivy resolution files in a finally block; otherwise, a failure during
    artifact resolution can leave the resolution files behind.
    Also use tempIvyPath for SparkSubmitUtils.buildIvySettings in the tests;
    because they previously resolved against the default ivy path, the test
    "SPARK-10878: test resolution files cleaned after resolving artifact" did
    not catch these issues.
    
    ### Why are the changes needed?
    This fixes a bug: clearIvyResolutionFiles previously ran only on the
    success path, so any failure during artifact resolution left the Ivy
    resolution files behind.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing unit tests
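    
    For illustration, with the tests now resolving into tempIvyPath, a
    leftover check can be phrased against that single directory. This is a
    hypothetical sketch (the actual assertion lives in the existing
    "SPARK-10878" test), and the ".xml" file-name filter is an assumption:
    
        import java.io.File
    
        // Hypothetical post-resolution check against the test-local ivy dir.
        def assertNoResolutionFiles(tempIvyPath: String): Unit = {
          val leftovers = Option(new File(tempIvyPath).listFiles())
            .getOrElse(Array.empty[File])
            .filter(_.getName.endsWith(".xml"))  // assumed resolution-file pattern
          assert(leftovers.isEmpty,
            s"resolution files left behind: ${leftovers.map(_.getName).mkString(", ")}")
        }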
    
    Closes #29411 from venkata91/SPARK-32596.
    
    Authored-by: Venkata krishnan Sowrirajan <vs...@linkedin.com>
    Signed-off-by: Mridul Muralidharan <mridul<at>gmail.com>
---
 .../org/apache/spark/deploy/SparkSubmit.scala      | 23 ++++++++++------------
 .../spark/deploy/SparkSubmitUtilsSuite.scala       | 16 +++++++--------
 2 files changed, 18 insertions(+), 21 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 6d38a1d..8363d57 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -28,7 +28,7 @@ import java.util.jar.JarInputStream
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
-import scala.util.{Failure, Properties, Success, Try}
+import scala.util.{Properties, Try}
 
 import org.apache.commons.io.FilenameUtils
 import org.apache.commons.lang3.StringUtils
@@ -1347,6 +1347,13 @@ private[spark] object SparkSubmitUtils {
       ""
     } else {
       val sysOut = System.out
+      // Default configuration name for ivy
+      val ivyConfName = "default"
+
+      // A Module descriptor must be specified. Entries are dummy strings
+      val md = getModuleDescriptor
+
+      md.setDefaultConf(ivyConfName)
       try {
         // To prevent ivy from logging to system out
         System.setOut(printStream)
@@ -1374,14 +1381,6 @@ private[spark] object SparkSubmitUtils {
           resolveOptions.setDownload(true)
         }
 
-        // Default configuration name for ivy
-        val ivyConfName = "default"
-
-        // A Module descriptor must be specified. Entries are dummy strings
-        val md = getModuleDescriptor
-
-        md.setDefaultConf(ivyConfName)
-
         // Add exclusion rules for Spark and Scala Library
         addExclusionRules(ivySettings, ivyConfName, md)
         // add all supplied maven artifacts as dependencies
@@ -1399,12 +1398,10 @@ private[spark] object SparkSubmitUtils {
           packagesDirectory.getAbsolutePath + File.separator +
             "[organization]_[artifact]-[revision](-[classifier]).[ext]",
           retrieveOptions.setConfs(Array(ivyConfName)))
-        val paths = resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
-        val mdId = md.getModuleRevisionId
-        clearIvyResolutionFiles(mdId, ivySettings, ivyConfName)
-        paths
+        resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
       } finally {
         System.setOut(sysOut)
+        clearIvyResolutionFiles(md.getModuleRevisionId, ivySettings, ivyConfName)
       }
     }
   }
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 31e6c730e..2a37f75 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -79,7 +79,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("create additional resolvers") {
     val repos = "a/1,b/2,c/3"
-    val settings = SparkSubmitUtils.buildIvySettings(Option(repos), None)
+    val settings = SparkSubmitUtils.buildIvySettings(Option(repos), Some(tempIvyPath))
     val resolver = settings.getDefaultResolver.asInstanceOf[ChainResolver]
     assert(resolver.getResolvers.size() === 4)
     val expected = repos.split(",").map(r => s"$r/")
@@ -134,7 +134,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
       // end to end
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
         main.toString,
-        SparkSubmitUtils.buildIvySettings(Option(repo), Option(tempIvyPath)),
+        SparkSubmitUtils.buildIvySettings(Option(repo), Some(tempIvyPath)),
         isTest = true)
       assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
     }
@@ -147,7 +147,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     IvyTestUtils.withRepository(main, Some(dep), Some(SparkSubmitUtils.m2Path)) { repo =>
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
         main.toString,
-        SparkSubmitUtils.buildIvySettings(None, None),
+        SparkSubmitUtils.buildIvySettings(None, Some(tempIvyPath)),
         isTest = true)
       assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
       assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
@@ -158,7 +158,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     IvyTestUtils.withRepository(main, Some(dep), Some(ivyLocal), useIvyLayout = true) { repo =>
       val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
         main.toString,
-        SparkSubmitUtils.buildIvySettings(None, None),
+        SparkSubmitUtils.buildIvySettings(None, Some(tempIvyPath)),
         isTest = true)
       assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
       assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
@@ -182,7 +182,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     intercept[RuntimeException] {
       SparkSubmitUtils.resolveMavenCoordinates(
       "a:b:c",
-      SparkSubmitUtils.buildIvySettings(None, None),
+      SparkSubmitUtils.buildIvySettings(None, Some(tempIvyPath)),
       isTest = true)
     }
   }
@@ -194,14 +194,14 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
     val path = SparkSubmitUtils.resolveMavenCoordinates(
       coordinates,
-      SparkSubmitUtils.buildIvySettings(None, None),
+      SparkSubmitUtils.buildIvySettings(None, Some(tempIvyPath)),
       isTest = true)
     assert(path === "", "should return empty path")
     val main = MavenCoordinate("org.apache.spark", "spark-streaming-kafka-assembly_2.12", "1.2.0")
     IvyTestUtils.withRepository(main, None, None) { repo =>
       val files = SparkSubmitUtils.resolveMavenCoordinates(
         coordinates + "," + main.toString,
-        SparkSubmitUtils.buildIvySettings(Some(repo), None),
+        SparkSubmitUtils.buildIvySettings(Some(repo), Some(tempIvyPath)),
         isTest = true)
       assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")
     }
@@ -213,7 +213,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     IvyTestUtils.withRepository(main, Some(dep), None) { repo =>
       val files = SparkSubmitUtils.resolveMavenCoordinates(
         main.toString,
-        SparkSubmitUtils.buildIvySettings(Some(repo), None),
+        SparkSubmitUtils.buildIvySettings(Some(repo), Some(tempIvyPath)),
         Seq("my.great.dep:mydep"),
         isTest = true)
       assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")

