Posted to commits@spark.apache.org by sr...@apache.org on 2023/02/16 15:26:04 UTC

[spark] branch master updated: [SPARK-42424][YARN] Remove unused declarations from Yarn module

This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f5deb7c9af6 [SPARK-42424][YARN] Remove unused declarations from Yarn module
f5deb7c9af6 is described below

commit f5deb7c9af618494adecfeb103f94219e27fcf70
Author: yangjie01 <ya...@baidu.com>
AuthorDate: Thu Feb 16 09:25:47 2023 -0600

    [SPARK-42424][YARN] Remove unused declarations from Yarn module
    
    ### What changes were proposed in this pull request?
    This pr cleans up unused declarations in the Yarn module:
    
    - `YarnSparkHadoopUtil#setEnvFromInputString`: this method was introduced by SPARK-3477 and became unused, with no test coverage, after SPARK-17979 (a sketch of its old behavior follows this list)
    
    - `YarnSparkHadoopUtil#environmentVariableRegex`: this `val` was used only by `YarnSparkHadoopUtil#setEnvFromInputString`
    
    - `ApplicationMasterArguments.DEFAULT_NUMBER_EXECUTORS`: this `val` was introduced by SPARK-1946, replaced by `YarnSparkHadoopUtil.DEFAULT_NUMBER_EXECUTORS` in SPARK-4138, and eventually superseded by `config#EXECUTOR_INSTANCES`
    
    - `ApplicationMaster.EXIT_SECURITY`: this `val` was introduced by SPARK-3627 to represent the exit code for errors related to `System.setSecurityManager`; SPARK-4584 removed the use of `SecurityManager`, leaving this `val` unused.
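    
    For context, below is a minimal, self-contained Scala sketch of the behavior being removed. It is illustrative only, not Spark code: the object name `EnvInputSketch` is hypothetical, it handles only the Unix `$VAR_NAME` form (the removed code also matched `%VAR_NAME%` on Windows), and it overwrites each key instead of appending with `File.pathSeparator` as the real method did via `addPathToEnvironment`. The `main` method doubles as a usage example.
    
        import java.util.regex.{Matcher, Pattern}
    
        import scala.collection.mutable.HashMap
    
        // Hypothetical standalone sketch; not the Spark API.
        object EnvInputSketch {
          // Matches the Unix-style $VAR_NAME form only.
          private val envVar = Pattern.compile("\\$([A-Za-z_][A-Za-z0-9_]*)")
    
          // Parses "KEY1=VAL1,KEY2=VAL2" and expands $VAR references in each value.
          def setEnvFromInputString(env: HashMap[String, String], input: String): Unit = {
            if (input != null && input.nonEmpty) {
              for (pair <- input.split(",")) {
                val parts = pair.split("=", 2)
                val m = envVar.matcher(parts(1))
                val sb = new StringBuffer
                while (m.find()) {
                  // Prefer a value already set for the child env, then the
                  // parent process environment, then the empty string.
                  val replacement = env.getOrElse(m.group(1),
                    Option(System.getenv(m.group(1))).getOrElse(""))
                  m.appendReplacement(sb, Matcher.quoteReplacement(replacement))
                }
                m.appendTail(sb)
                env.put(parts(0), sb.toString)
              }
            }
          }
    
          def main(args: Array[String]): Unit = {
            val env = HashMap("SPARK_HOME" -> "/opt/spark")
            setEnvFromInputString(env, "CLASSPATH=$SPARK_HOME/jars,FOO=bar")
            println(env("CLASSPATH"))  // /opt/spark/jars
            println(env("FOO"))        // bar
          }
        }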
    
    ### Why are the changes needed?
    Code cleanup.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    Closes #39997 from LuciferYang/SPARK-42424.
    
    Authored-by: yangjie01 <ya...@baidu.com>
    Signed-off-by: Sean Owen <sr...@gmail.com>
---
 .../spark/deploy/yarn/ApplicationMaster.scala      |  1 -
 .../deploy/yarn/ApplicationMasterArguments.scala   |  4 --
 .../spark/deploy/yarn/YarnSparkHadoopUtil.scala    | 50 ----------------------
 3 files changed, 55 deletions(-)

diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 9815fa6df8a..252c84a1cd4 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -891,7 +891,6 @@ object ApplicationMaster extends Logging {
   private val EXIT_MAX_EXECUTOR_FAILURES = 11
   private val EXIT_REPORTER_FAILURE = 12
   private val EXIT_SC_NOT_INITED = 13
-  private val EXIT_SECURITY = 14
   private val EXIT_EXCEPTION_USER_CLASS = 15
   private val EXIT_EARLY = 16
   private val EXIT_DISCONNECTED = 17
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
index d2275980814..821a31502af 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
@@ -102,7 +102,3 @@ class ApplicationMasterArguments(val args: Array[String]) {
     System.exit(exitCode)
   }
 }
-
-object ApplicationMasterArguments {
-  val DEFAULT_NUMBER_EXECUTORS = 2
-}
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index 1869c739e48..4d050b91a85 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.deploy.yarn
 
-import java.util.regex.{Matcher, Pattern}
-
 import scala.collection.immutable.{Map => IMap}
 import scala.collection.mutable.{HashMap, ListBuffer}
 import scala.util.matching.Regex
@@ -59,41 +57,6 @@ object YarnSparkHadoopUtil {
     env.put(key, newValue)
   }
 
-  /**
-   * Set zero or more environment variables specified by the given input string.
-   * The input string is expected to take the form "KEY1=VAL1,KEY2=VAL2,KEY3=VAL3".
-   */
-  def setEnvFromInputString(env: HashMap[String, String], inputString: String): Unit = {
-    if (inputString != null && inputString.length() > 0) {
-      val childEnvs = inputString.split(",")
-      val p = Pattern.compile(environmentVariableRegex)
-      for (cEnv <- childEnvs) {
-        val parts = cEnv.split("=") // split on '='
-        val m = p.matcher(parts(1))
-        val sb = new StringBuffer
-        while (m.find()) {
-          val variable = m.group(1)
-          var replace = ""
-          if (env.contains(variable)) {
-            replace = env(variable)
-          } else {
-            // if this key is not configured for the child .. get it from the env
-            replace = System.getenv(variable)
-            if (replace == null) {
-            // the env key is not present anywhere .. simply set it
-              replace = ""
-            }
-          }
-          m.appendReplacement(sb, Matcher.quoteReplacement(replace))
-        }
-        m.appendTail(sb)
-        // This treats the environment variable as path variable delimited by `File.pathSeparator`
-        // This is kept for backward compatibility and consistency with Hadoop's behavior
-        addPathToEnvironment(env, parts(0), sb.toString)
-      }
-    }
-  }
-
   /**
    * Regex pattern to match the name of an environment variable. Note that Unix variable naming
    * conventions (alphanumeric plus underscore, case-sensitive, can't start with a digit)
@@ -102,19 +65,6 @@ object YarnSparkHadoopUtil {
    */
   private val envVarNameRegex: String = "[A-Za-z_][A-Za-z0-9_]*"
 
-  /**
-   * Note that this regex only supports the `$VAR_NAME` and `%VAR_NAME%` syntax, for Unix and
-   * Windows respectively, and does not perform any handling of escapes. The Unix `${VAR_NAME}`
-   * syntax is not supported.
-   */
-  private val environmentVariableRegex: String = {
-    if (Utils.isWindows) {
-      s"%($envVarNameRegex)%"
-    } else {
-      s"\\$$($envVarNameRegex)"
-    }
-  }
-
   // scalastyle:off line.size.limit
   /**
    * Replace environment variables in a string according to the same rules as

