You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2019/03/22 10:29:02 UTC
[spark] branch master updated: [MINOR][CORE] Leverage modified
Utils.classForName to reduce scalastyle off for Class.forName
This is an automated email from the ASF dual-hosted git repository.
srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 174531c [MINOR][CORE] Leverage modified Utils.classForName to reduce scalastyle off for Class.forName
174531c is described below
commit 174531c183d058c6f92330ef1780e5a5c03d34f0
Author: Jungtaek Lim (HeartSaVioR) <ka...@gmail.com>
AuthorDate: Fri Mar 22 05:28:46 2019 -0500
[MINOR][CORE] Leverage modified Utils.classForName to reduce scalastyle off for Class.forName
## What changes were proposed in this pull request?
This patch modifies Utils.classForName to accept optional parameters - initialize, noSparkClassLoader - so that callers currently invoking Class.forName with the thread context classloader can use it instead. This helps to reduce scalastyle off for Class.forName.
## How was this patch tested?
Existing UTs.
Closes #24148 from HeartSaVioR/MINOR-reduce-scalastyle-off-for-class-forname.
Authored-by: Jungtaek Lim (HeartSaVioR) <ka...@gmail.com>
Signed-off-by: Sean Owen <se...@databricks.com>
---
.../apache/spark/serializer/KryoSerializer.scala | 35 ++++++++++------------
.../org/apache/spark/util/ClosureCleaner.scala | 14 +++------
.../main/scala/org/apache/spark/util/Utils.scala | 20 +++++++++----
.../test/scala/org/apache/spark/FileSuite.scala | 15 +++-------
.../KryoSerializerDistributedSuite.scala | 6 ++--
.../spark/util/MutableURLClassLoaderSuite.scala | 5 +---
6 files changed, 41 insertions(+), 54 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index 2df133d..eef1997 100644
--- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -130,7 +130,6 @@ class KryoSerializer(conf: SparkConf)
val kryo = instantiator.newKryo()
kryo.setRegistrationRequired(registrationRequired)
- val oldClassLoader = Thread.currentThread.getContextClassLoader
val classLoader = defaultClassLoader.getOrElse(Thread.currentThread.getContextClassLoader)
// Allow disabling Kryo reference tracking if user knows their object graphs don't have loops.
@@ -156,24 +155,22 @@ class KryoSerializer(conf: SparkConf)
kryo.register(classOf[GenericRecord], new GenericAvroSerializer(avroSchemas))
kryo.register(classOf[GenericData.Record], new GenericAvroSerializer(avroSchemas))
- try {
- // scalastyle:off classforname
- // Use the default classloader when calling the user registrator.
- Thread.currentThread.setContextClassLoader(classLoader)
- // Register classes given through spark.kryo.classesToRegister.
- classesToRegister
- .foreach { className => kryo.register(Class.forName(className, true, classLoader)) }
- // Allow the user to register their own classes by setting spark.kryo.registrator.
- userRegistrators
- .map(Class.forName(_, true, classLoader).getConstructor().
- newInstance().asInstanceOf[KryoRegistrator])
- .foreach { reg => reg.registerClasses(kryo) }
- // scalastyle:on classforname
- } catch {
- case e: Exception =>
- throw new SparkException(s"Failed to register classes with Kryo", e)
- } finally {
- Thread.currentThread.setContextClassLoader(oldClassLoader)
+ // Use the default classloader when calling the user registrator.
+ Utils.withContextClassLoader(classLoader) {
+ try {
+ // Register classes given through spark.kryo.classesToRegister.
+ classesToRegister.foreach { className =>
+ kryo.register(Utils.classForName(className, noSparkClassLoader = true))
+ }
+ // Allow the user to register their own classes by setting spark.kryo.registrator.
+ userRegistrators
+ .map(Utils.classForName(_, noSparkClassLoader = true).getConstructor().
+ newInstance().asInstanceOf[KryoRegistrator])
+ .foreach { reg => reg.registerClasses(kryo) }
+ } catch {
+ case e: Exception =>
+ throw new SparkException(s"Failed to register classes with Kryo", e)
+ }
}
// Register Chill's classes; we do this after our ranges and the user's own classes to let
diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index 1b3e525..5f725d8 100644
--- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -378,10 +378,8 @@ private[spark] object ClosureCleaner extends Logging {
} else {
logDebug(s"Cleaning lambda: ${lambdaFunc.get.getImplMethodName}")
- // scalastyle:off classforname
- val captClass = Class.forName(lambdaFunc.get.getCapturingClass.replace('/', '.'),
- false, Thread.currentThread.getContextClassLoader)
- // scalastyle:on classforname
+ val captClass = Utils.classForName(lambdaFunc.get.getCapturingClass.replace('/', '.'),
+ initialize = false, noSparkClassLoader = true)
// Fail fast if we detect return statements in closures
getClassReader(captClass)
.accept(new ReturnStatementFinder(Some(lambdaFunc.get.getImplMethodName)), 0)
@@ -547,12 +545,8 @@ private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM
if (op == INVOKESPECIAL && name == "<init>" && argTypes.length > 0
&& argTypes(0).toString.startsWith("L") // is it an object?
&& argTypes(0).getInternalName == myName) {
- // scalastyle:off classforname
- output += Class.forName(
- owner.replace('/', '.'),
- false,
- Thread.currentThread.getContextClassLoader)
- // scalastyle:on classforname
+ output += Utils.classForName(owner.replace('/', '.'),
+ initialize = false, noSparkClassLoader = true)
}
}
}
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 0398625..7c8648d 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -189,15 +189,23 @@ private[spark] object Utils extends Logging {
/** Determines whether the provided class is loadable in the current thread. */
def classIsLoadable(clazz: String): Boolean = {
- // scalastyle:off classforname
- Try { Class.forName(clazz, false, getContextOrSparkClassLoader) }.isSuccess
- // scalastyle:on classforname
+ Try { classForName(clazz, initialize = false) }.isSuccess
}
// scalastyle:off classforname
- /** Preferred alternative to Class.forName(className) */
- def classForName(className: String): Class[_] = {
- Class.forName(className, true, getContextOrSparkClassLoader)
+ /**
+ * Preferred alternative to Class.forName(className), as well as
+ * Class.forName(className, initialize, loader) with current thread's ContextClassLoader.
+ */
+ def classForName(
+ className: String,
+ initialize: Boolean = true,
+ noSparkClassLoader: Boolean = false): Class[_] = {
+ if (!noSparkClassLoader) {
+ Class.forName(className, initialize, getContextOrSparkClassLoader)
+ } else {
+ Class.forName(className, initialize, Thread.currentThread().getContextClassLoader)
+ }
// scalastyle:on classforname
}
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 766cb0a..8f212f6 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -200,30 +200,23 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
}
test("object files of classes from a JAR") {
- // scalastyle:off classforname
- val original = Thread.currentThread().getContextClassLoader
val className = "FileSuiteObjectFileTest"
val jar = TestUtils.createJarWithClasses(Seq(className))
val loader = new java.net.URLClassLoader(Array(jar), Utils.getContextOrSparkClassLoader)
- Thread.currentThread().setContextClassLoader(loader)
- try {
+
+ Utils.withContextClassLoader(loader) {
sc = new SparkContext("local", "test")
val objs = sc.makeRDD(1 to 3).map { x =>
- val loader = Thread.currentThread().getContextClassLoader
- Class.forName(className, true, loader).getConstructor().newInstance()
+ Utils.classForName(className, noSparkClassLoader = true).getConstructor().newInstance()
}
val outputDir = new File(tempDir, "output").getAbsolutePath
objs.saveAsObjectFile(outputDir)
// Try reading the output back as an object file
- val ct = reflect.ClassTag[Any](Class.forName(className, true, loader))
+ val ct = reflect.ClassTag[Any](Utils.classForName(className, noSparkClassLoader = true))
val output = sc.objectFile[Any](outputDir)
assert(output.collect().size === 3)
assert(output.collect().head.getClass.getName === className)
}
- finally {
- Thread.currentThread().setContextClassLoader(original)
- }
- // scalastyle:on classforname
}
test("write SequenceFile using new Hadoop API") {
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
index 57ed1f6..5d76c09 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
@@ -57,10 +57,8 @@ object KryoDistributedTest {
class AppJarRegistrator extends KryoRegistrator {
override def registerClasses(k: Kryo) {
- val classLoader = Thread.currentThread.getContextClassLoader
- // scalastyle:off classforname
- k.register(Class.forName(AppJarRegistrator.customClassName, true, classLoader))
- // scalastyle:on classforname
+ k.register(Utils.classForName(AppJarRegistrator.customClassName,
+ noSparkClassLoader = true))
}
}
diff --git a/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala b/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala
index 8d844bd..8f38ee3 100644
--- a/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala
@@ -134,10 +134,7 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
try {
sc.makeRDD(1 to 5, 2).mapPartitions { x =>
- val loader = Thread.currentThread().getContextClassLoader
- // scalastyle:off classforname
- Class.forName(className, true, loader).getConstructor().newInstance()
- // scalastyle:on classforname
+ Utils.classForName(className, noSparkClassLoader = true).getConstructor().newInstance()
Seq().iterator
}.count()
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org