Posted to commits@spark.apache.org by ad...@apache.org on 2014/05/26 05:13:41 UTC

git commit: HOTFIX: Add no-arg SparkContext constructor in Java

Repository: spark
Updated Branches:
  refs/heads/master c3576ffcd -> b6d22af04


HOTFIX: Add no-arg SparkContext constructor in Java

Self-explanatory.

Author: Patrick Wendell <pw...@gmail.com>

Closes #878 from pwendell/java-constructor and squashes the following commits:

2cc1605 [Patrick Wendell] HOTFIX: Add no-arg SparkContext constructor in Java


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b6d22af0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b6d22af0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b6d22af0

Branch: refs/heads/master
Commit: b6d22af040073cd611b0fcfdf8a5259c0dfd854c
Parents: c3576ff
Author: Patrick Wendell <pw...@gmail.com>
Authored: Sun May 25 20:13:32 2014 -0700
Committer: Aaron Davidson <aa...@databricks.com>
Committed: Sun May 25 20:13:32 2014 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/api/java/JavaSparkContext.scala     | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b6d22af0/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index a7cfee6..1e0493c 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -42,6 +42,12 @@ import org.apache.spark.rdd.RDD
  */
 class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround {
   /**
+   * Create a JavaSparkContext that loads settings from system properties (for instance, when
+   * launching with ./bin/spark-submit).
+   */
+  def this() = this(new SparkContext())
+
+  /**
    * @param conf a [[org.apache.spark.SparkConf]] object specifying Spark parameters
    */
   def this(conf: SparkConf) = this(new SparkContext(conf))
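

For context, a minimal Java sketch of how an application might use the new no-arg constructor (the class name NoArgContextExample is illustrative, not part of this commit): when the application is launched with ./bin/spark-submit, Spark settings are read from system properties, so no explicit SparkConf needs to be built.

import org.apache.spark.api.java.JavaSparkContext;

public class NoArgContextExample {
  public static void main(String[] args) {
    // Configuration (master URL, app name, etc.) is picked up from system
    // properties, e.g. those set by ./bin/spark-submit, rather than from an
    // explicitly constructed SparkConf.
    JavaSparkContext sc = new JavaSparkContext();

    // ... use sc to create and transform RDDs ...

    sc.stop();
  }
}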