You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2017/07/13 00:35:01 UTC
spark git commit: Revert "[SPARK-18646][REPL] Set parent classloader
as null for ExecutorClassLoader"
Repository: spark
Updated Branches:
refs/heads/branch-2.2 39eba3053 -> cf0719b5e
Revert "[SPARK-18646][REPL] Set parent classloader as null for ExecutorClassLoader"
This reverts commit 39eba3053ac99f03d9df56471bae5fc5cc9f4462.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cf0719b5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cf0719b5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cf0719b5
Branch: refs/heads/branch-2.2
Commit: cf0719b5e99333b28bb4066b304dbcf8400c80ea
Parents: 39eba30
Author: Wenchen Fan <we...@databricks.com>
Authored: Thu Jul 13 08:34:42 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Thu Jul 13 08:34:42 2017 +0800
----------------------------------------------------------------------
.../apache/spark/repl/ExecutorClassLoader.scala | 17 +++-----
.../spark/repl/ExecutorClassLoaderSuite.scala | 46 --------------------
2 files changed, 6 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/cf0719b5/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index 127f673..df13b32 100644
--- a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -33,23 +33,18 @@ import org.apache.spark.internal.Logging
import org.apache.spark.util.{ParentClassLoader, Utils}
/**
- * A ClassLoader that reads classes from a Hadoop FileSystem or HTTP URI, used to load classes
- * defined by the interpreter when the REPL is used. Allows the user to specify if user class path
- * should be first. This class loader delegates getting/finding resources to parent loader, which
- * makes sense until REPL never provide resource dynamically.
- *
- * Note: [[ClassLoader]] will preferentially load class from parent. Only when parent is null or
- * the load failed, that it will call the overridden `findClass` function. To avoid the potential
- * issue caused by loading class using inappropriate class loader, we should set the parent of
- * ClassLoader to null, so that we can fully control which class loader is used. For detailed
- * discussion, see SPARK-18646.
+ * A ClassLoader that reads classes from a Hadoop FileSystem or HTTP URI,
+ * used to load classes defined by the interpreter when the REPL is used.
+ * Allows the user to specify if user class path should be first.
+ * This class loader delegates getting/finding resources to parent loader,
+ * which makes sense as long as the REPL does not provide resources dynamically.
*/
class ExecutorClassLoader(
conf: SparkConf,
env: SparkEnv,
classUri: String,
parent: ClassLoader,
- userClassPathFirst: Boolean) extends ClassLoader(null) with Logging {
+ userClassPathFirst: Boolean) extends ClassLoader with Logging {
val uri = new URI(classUri)
val directory = uri.getPath
http://git-wip-us.apache.org/repos/asf/spark/blob/cf0719b5/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
index 092d3c2..6d274bd 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
@@ -23,8 +23,6 @@ import java.nio.channels.{FileChannel, ReadableByteChannel}
import java.nio.charset.StandardCharsets
import java.nio.file.{Paths, StandardOpenOption}
import java.util
-import java.util.Collections
-import javax.tools.{JavaFileObject, SimpleJavaFileObject, ToolProvider}
import scala.io.Source
import scala.language.implicitConversions
@@ -79,50 +77,6 @@ class ExecutorClassLoaderSuite
}
}
- test("child over system classloader") {
- // JavaFileObject for scala.Option class
- val scalaOptionFile = new SimpleJavaFileObject(
- URI.create(s"string:///scala/Option.java"),
- JavaFileObject.Kind.SOURCE) {
-
- override def getCharContent(ignoreEncodingErrors: Boolean): CharSequence = {
- "package scala; class Option {}"
- }
- }
- // compile fake scala.Option class
- ToolProvider
- .getSystemJavaCompiler
- .getTask(null, null, null, null, null, Collections.singletonList(scalaOptionFile)).call()
-
- // create 'scala' dir in tempDir1
- val scalaDir = new File(tempDir1, "scala")
- assert(scalaDir.mkdir(), s"Failed to create 'scala' directory in $tempDir1")
-
- // move the generated class into scala dir
- val filename = "Option.class"
- val result = new File(filename)
- assert(result.exists(), "Compiled file not found: " + result.getAbsolutePath)
-
- val out = new File(scalaDir, filename)
- Files.move(result, out)
- assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath)
-
- // construct class loader tree
- val parentLoader = new URLClassLoader(urls2, null)
- val classLoader = new ExecutorClassLoader(
- new SparkConf(), null, url1, parentLoader, true)
-
- // load 'scala.Option', using Class.forName to do the exact same behavior as
- // what JavaDeserializationStream does
-
- // scalastyle:off classforname
- val optionClass = Class.forName("scala.Option", false, classLoader)
- // scalastyle:on classforname
-
- assert(optionClass.getClassLoader == classLoader,
- "scala.Option didn't come from ExecutorClassLoader")
- }
-
test("child first") {
val parentLoader = new URLClassLoader(urls2, null)
val classLoader = new ExecutorClassLoader(new SparkConf(), null, url1, parentLoader, true)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org