You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2022/01/01 04:31:24 UTC

[spark] branch master updated: [SPARK-37795][BUILD] Add a scalastyle rule to ban `org.apache.log4j` imports

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 11596b3  [SPARK-37795][BUILD] Add a scalastyle rule to ban `org.apache.log4j` imports
11596b3 is described below

commit 11596b3b17b5e0f54e104cd49b1397c33c34719d
Author: William Hyun <wi...@apache.org>
AuthorDate: Fri Dec 31 20:30:20 2021 -0800

    [SPARK-37795][BUILD] Add a scalastyle rule to ban `org.apache.log4j` imports
    
    ### What changes were proposed in this pull request?
    This PR aims to add a new scalastyle rule to ban `org.apache.log4j` imports.
    
    ### Why are the changes needed?
    This will help us remove the log4j bridge in the future.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass the CIs.
    
    Closes #35077 from williamhyun/checkstylelog4j.
    
    Authored-by: William Hyun <wi...@apache.org>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 core/src/main/scala/org/apache/spark/SparkContext.scala      | 3 ++-
 core/src/main/scala/org/apache/spark/TestUtils.scala         | 2 ++
 core/src/test/scala/org/apache/spark/SparkContextSuite.scala | 9 +++++----
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala   | 9 +++++----
 scalastyle-config.xml                                        | 6 ++++++
 5 files changed, 20 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0c5fb0a..86bf725 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -39,6 +39,7 @@ import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, Doub
 import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
 import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat}
+import org.apache.logging.log4j.Level
 
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
 import org.apache.spark.broadcast.Broadcast
@@ -383,7 +384,7 @@ class SparkContext(config: SparkConf) extends Logging {
     require(SparkContext.VALID_LOG_LEVELS.contains(upperCased),
       s"Supplied level $logLevel did not match one of:" +
         s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}")
-    Utils.setLogLevel(org.apache.logging.log4j.Level.toLevel(upperCased))
+    Utils.setLogLevel(Level.toLevel(upperCased))
   }
 
   try {
diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala
index 65ef813..9bc6ccb 100644
--- a/core/src/main/scala/org/apache/spark/TestUtils.scala
+++ b/core/src/main/scala/org/apache/spark/TestUtils.scala
@@ -41,7 +41,9 @@ import scala.util.Try
 
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.lang3.StringUtils
+// scalastyle:off
 import org.apache.log4j.PropertyConfigurator
+// scalastyle:on
 import org.eclipse.jetty.server.Handler
 import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.server.handler.DefaultHandler
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 3bc8841..411a3b1 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
+import org.apache.logging.log4j.{Level, LogManager}
 import org.json4s.{DefaultFormats, Extraction}
 import org.junit.Assert.{assertEquals, assertFalse}
 import org.scalatest.concurrent.Eventually
@@ -612,15 +613,15 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
 
   test("log level case-insensitive and reset log level") {
     sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
-    val originalLevel = org.apache.log4j.Logger.getRootLogger().getLevel
+    val originalLevel = LogManager.getRootLogger().getLevel
     try {
       sc.setLogLevel("debug")
-      assert(org.apache.log4j.Logger.getRootLogger().getLevel === org.apache.log4j.Level.DEBUG)
+      assert(LogManager.getRootLogger().getLevel === Level.DEBUG)
       sc.setLogLevel("INfo")
-      assert(org.apache.log4j.Logger.getRootLogger().getLevel === org.apache.log4j.Level.INFO)
+      assert(LogManager.getRootLogger().getLevel === Level.INFO)
     } finally {
       sc.setLogLevel(originalLevel.toString)
-      assert(org.apache.log4j.Logger.getRootLogger().getLevel === originalLevel)
+      assert(LogManager.getRootLogger().getLevel === originalLevel)
       sc.stop()
     }
   }
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index bfb1bc3..6117dec 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -36,6 +36,7 @@ import org.apache.commons.lang3.{JavaVersion, SystemUtils}
 import org.apache.commons.math3.stat.inference.ChiSquareTest
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
+import org.apache.logging.log4j.Level
 
 import org.apache.spark.{SparkConf, SparkException, SparkFunSuite, TaskContext}
 import org.apache.spark.internal.Logging
@@ -688,11 +689,11 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     val rootLogger = org.apache.logging.log4j.LogManager.getRootLogger()
     val current = rootLogger.getLevel()
     try {
-      Utils.setLogLevel(org.apache.logging.log4j.Level.ALL)
-      assert(rootLogger.getLevel == org.apache.logging.log4j.Level.ALL)
+      Utils.setLogLevel(Level.ALL)
+      assert(rootLogger.getLevel == Level.ALL)
       assert(log.isInfoEnabled())
-      Utils.setLogLevel(org.apache.logging.log4j.Level.ERROR)
-      assert(rootLogger.getLevel == org.apache.logging.log4j.Level.ERROR)
+      Utils.setLogLevel(Level.ERROR)
+      assert(rootLogger.getLevel == Level.ERROR)
       assert(!log.isInfoEnabled())
       assert(log.isErrorEnabled())
     } finally {
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 396d69b..791d910 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -335,6 +335,12 @@ This file is divided into 3 sections:
     ]]></customMessage>
   </check>
 
+  <check level="error" class="org.scalastyle.scalariform.IllegalImportsChecker" enabled="true">
+    <parameters><parameter name="illegalImports"><![CDATA[org.apache.log4j]]></parameter></parameters>
+    <customMessage>Please use Apache Log4j 2 instead.</customMessage>
+  </check>
+
+
   <!-- ================================================================================ -->
   <!--       rules we'd like to enforce, but haven't cleaned up the codebase yet        -->
   <!-- ================================================================================ -->

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org