You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sh...@apache.org on 2015/06/02 06:01:20 UTC
spark git commit: [SPARK-8028] [SPARKR] Use addJar instead of setJars in SparkR
Repository: spark
Updated Branches:
refs/heads/master 15d7c90ae -> 6b44278ef
[SPARK-8028] [SPARKR] Use addJar instead of setJars in SparkR
This prevents the `spark.jars` configuration from being cleared when using `--packages` or `--jars`.
cc pwendell davies brkyvz
Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Closes #6568 from shivaram/SPARK-8028 and squashes the following commits:
3a9cf1f [Shivaram Venkataraman] Use addJar instead of setJars in SparkR This prevents the spark.jars from being cleared
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6b44278e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6b44278e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6b44278e
Branch: refs/heads/master
Commit: 6b44278ef7cd2a278dfa67e8393ef30775c72726
Parents: 15d7c90
Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Authored: Mon Jun 1 21:01:14 2015 -0700
Committer: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Committed: Mon Jun 1 21:01:14 2015 -0700
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/api/r/RRDD.scala | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/6b44278e/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
index e020458..4dfa732 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
@@ -355,7 +355,6 @@ private[r] object RRDD {
val sparkConf = new SparkConf().setAppName(appName)
.setSparkHome(sparkHome)
- .setJars(jars)
// Override `master` if we have a user-specified value
if (master != "") {
@@ -373,7 +372,11 @@ private[r] object RRDD {
sparkConf.setExecutorEnv(name.asInstanceOf[String], value.asInstanceOf[String])
}
- new JavaSparkContext(sparkConf)
+ val jsc = new JavaSparkContext(sparkConf)
+ jars.foreach { jar =>
+ jsc.addJar(jar)
+ }
+ jsc
}
/**
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org