You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by va...@apache.org on 2019/02/28 01:02:14 UTC

[spark] branch master updated: [SPARK-26895][CORE][FOLLOW-UP] Uninitializing log after `prepareSubmitEnvironment` in SparkSubmit

This is an automated email from the ASF dual-hosted git repository.

vanzin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6e31ccf  [SPARK-26895][CORE][FOLLOW-UP] Uninitializing log after `prepareSubmitEnvironment` in SparkSubmit
6e31ccf is described below

commit 6e31ccf2a196881c7b4ffbe0afba7d93a7f2875c
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Wed Feb 27 17:01:30 2019 -0800

    [SPARK-26895][CORE][FOLLOW-UP] Uninitializing log after `prepareSubmitEnvironment` in SparkSubmit
    
    ## What changes were proposed in this pull request?
    
    Currently, if I run `spark-shell` in my local, it started to show the logs as below:
    
    ```
    $ ./bin/spark-shell
    ...
    19/02/28 04:42:43 INFO SecurityManager: Changing view acls to: hkwon
    19/02/28 04:42:43 INFO SecurityManager: Changing modify acls to: hkwon
    19/02/28 04:42:43 INFO SecurityManager: Changing view acls groups to:
    19/02/28 04:42:43 INFO SecurityManager: Changing modify acls groups to:
    19/02/28 04:42:43 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(hkwon); groups with view permissions: Set(); users  with modify permissions: Set(hkwon); groups with modify permissions: Set()
    19/02/28 04:42:43 INFO SignalUtils: Registered signal handler for INT
    19/02/28 04:42:48 INFO SparkContext: Running Spark version 3.0.0-SNAPSHOT
    19/02/28 04:42:48 INFO SparkContext: Submitted application: Spark shell
    19/02/28 04:42:48 INFO SecurityManager: Changing view acls to: hkwon
    ```
    
    The cause seems to be https://github.com/apache/spark/pull/23806, where `prepareSubmitEnvironment` actually reinitializes the logging again.
    
    This PR proposes to uninitialize the log later, after `prepareSubmitEnvironment` is called.
    
    ## How was this patch tested?
    
    Manually tested.
    
    Closes #23911 from HyukjinKwon/SPARK-26895.
    
    Authored-by: Hyukjin Kwon <gu...@apache.org>
    Signed-off-by: Marcelo Vanzin <va...@cloudera.com>
---
 .../main/scala/org/apache/spark/deploy/SparkSubmit.scala | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 2843bd5..45ad7b3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -144,7 +144,7 @@ private[spark] class SparkSubmit extends Logging {
         try {
           proxyUser.doAs(new PrivilegedExceptionAction[Unit]() {
             override def run(): Unit = {
-              runMain(args)
+              runMain(args, uninitLog)
             }
           })
         } catch {
@@ -159,15 +159,10 @@ private[spark] class SparkSubmit extends Logging {
             }
         }
       } else {
-        runMain(args)
+        runMain(args, uninitLog)
       }
     }
 
-    // Let the main class re-initialize the logging system once it starts.
-    if (uninitLog) {
-      Logging.uninitialize()
-    }
-
     // In standalone cluster mode, there are two submission gateways:
     //   (1) The traditional RPC gateway using o.a.s.deploy.Client as a wrapper
     //   (2) The new REST-based gateway introduced in Spark 1.3
@@ -777,8 +772,13 @@ private[spark] class SparkSubmit extends Logging {
    * Note that this main class will not be the one provided by the user if we're
    * running cluster deploy mode or python applications.
    */
-  private def runMain(args: SparkSubmitArguments): Unit = {
+  private def runMain(args: SparkSubmitArguments, uninitLog: Boolean): Unit = {
     val (childArgs, childClasspath, sparkConf, childMainClass) = prepareSubmitEnvironment(args)
+    // Let the main class re-initialize the logging system once it starts.
+    if (uninitLog) {
+      Logging.uninitialize()
+    }
+
     if (args.verbose) {
       logInfo(s"Main class:\n$childMainClass")
       logInfo(s"Arguments:\n${childArgs.mkString("\n")}")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org