Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/19 01:24:34 UTC

git commit: Merge pull request #437 from mridulm/master

Updated Branches:
  refs/heads/branch-0.9 4ac8cab08 -> 76147a290


Merge pull request #437 from mridulm/master

Minor API usability changes

- Expose the checkpoint directory, since it is now auto-generated (usage sketches follow each diff below)
- Add a null check for jars
- Expose SparkHadoopUtil, so that configuration creation is abstracted away from user code and functionality already in Spark is not duplicated
(cherry picked from commit 73dfd42fba5e526cc57e2a2ed78be323b63cb8fa)

Signed-off-by: Patrick Wendell <pw...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/76147a29
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/76147a29
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/76147a29

Branch: refs/heads/branch-0.9
Commit: 76147a290327c8ff76c845cbe347b5fe09de3da7
Parents: 4ac8cab
Author: Patrick Wendell <pw...@gmail.com>
Authored: Sat Jan 18 16:23:56 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Sat Jan 18 16:24:16 2014 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala          | 4 +++-
 .../main/scala/org/apache/spark/api/java/JavaSparkContext.scala  | 2 ++
 .../src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 1 -
 3 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/76147a29/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index ba3e91e..ddd7d60 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -956,6 +956,8 @@ class SparkContext(
     }
   }
 
+  def getCheckpointDir = checkpointDir
+
   /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
   def defaultParallelism: Int = taskScheduler.defaultParallelism
 
@@ -1125,7 +1127,7 @@ object SparkContext {
     if (sparkHome != null) {
       res.setSparkHome(sparkHome)
     }
-    if (!jars.isEmpty) {
+    if (jars != null && !jars.isEmpty) {
       res.setJars(jars)
     }
     res.setExecutorEnv(environment.toSeq)
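
A minimal usage sketch covering both hunks above (the master URL, app name, and /tmp path are illustrative assumptions, not part of the commit). checkpointDir is an Option[String] inside SparkContext, so the new accessor returns None until a directory has been set:

  import org.apache.spark.SparkContext

  // with the new null check, a null jars argument no longer causes an NPE
  val sc = new SparkContext("local", "checkpoint-example", null, null)
  sc.setCheckpointDir("/tmp/spark-checkpoints")
  // getCheckpointDir: Option[String]; None until setCheckpointDir is called
  sc.getCheckpointDir.foreach(dir => println("Checkpointing to " + dir))
  sc.stop()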

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/76147a29/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 8041163..33c931b 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -401,6 +401,8 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
     sc.setCheckpointDir(dir)
   }
 
+  def getCheckpointDir = JavaUtils.optionToOptional(sc.getCheckpointDir)
+
   protected def checkpointFile[T](path: String): JavaRDD[T] = {
     implicit val cm: ClassTag[T] =
       implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[T]]
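
The Java API surfaces the same value as a Guava Optional via JavaUtils.optionToOptional. A sketch of what calling it might look like (app name and path again assumed for illustration):

  import org.apache.spark.api.java.JavaSparkContext

  val jsc = new JavaSparkContext("local", "java-checkpoint-example")
  jsc.setCheckpointDir("/tmp/spark-checkpoints")
  val dir = jsc.getCheckpointDir  // com.google.common.base.Optional[String]
  if (dir.isPresent) println("Checkpointing to " + dir.get)
  jsc.stop()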

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/76147a29/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 27dc42b..b479225 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -28,7 +28,6 @@ import org.apache.spark.{SparkContext, SparkException}
 /**
  * Contains util methods to interact with Hadoop from Spark.
  */
-private[spark]
 class SparkHadoopUtil {
   val conf = newConfiguration()
   UserGroupInformation.setConfiguration(conf)
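
Removing the private[spark] modifier makes the class reachable from user code, so a Hadoop Configuration can be obtained through Spark instead of being rebuilt by hand. A sketch, assuming the SparkHadoopUtil.get accessor on this branch (which is expected to return the YARN subclass when running in YARN mode); the configuration key is only an example:

  import org.apache.spark.deploy.SparkHadoopUtil

  // reuse Spark's Hadoop configuration logic instead of duplicating it
  val hadoopConf = SparkHadoopUtil.get.newConfiguration()
  println(hadoopConf.get("fs.default.name"))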