You are viewing a plain text version of this content; the hyperlink to the canonical version was present in the original HTML page but is not preserved in this rendering.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/06 19:29:15 UTC
[2/3] git commit: Add warning to null setJars check
Add warning to null setJars check
Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/25446dd9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/25446dd9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/25446dd9
Branch: refs/heads/master
Commit: 25446dd931cce5916de5dddf4689b41ee6fd3148
Parents: ad35c1a
Author: Thomas Graves <tg...@apache.org>
Authored: Mon Jan 6 07:58:59 2014 -0600
Committer: Thomas Graves <tg...@apache.org>
Committed: Mon Jan 6 07:58:59 2014 -0600
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/SparkConf.scala | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/25446dd9/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 7073a99..55f2703 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -24,7 +24,7 @@ import com.typesafe.config.ConfigFactory
*
* @param loadDefaults whether to load values from the system properties and classpath
*/
-class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
+class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable with Logging {
/** Create a SparkConf that loads defaults from system properties and the classpath */
def this() = this(true)
@@ -67,6 +67,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
/** Set JAR files to distribute to the cluster. */
def setJars(jars: Seq[String]): SparkConf = {
+ for (jar <- jars if (jar == null)) logWarning("null jar passed to SparkContext constructor")
set("spark.jars", jars.filter(_ != null).mkString(","))
}