You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2017/08/02 04:59:05 UTC

spark git commit: [SPARK-21578][CORE] Add JavaSparkContextSuite

Repository: spark
Updated Branches:
  refs/heads/master 4cc704b12 -> 14e75758a


[SPARK-21578][CORE] Add JavaSparkContextSuite

## What changes were proposed in this pull request?

Due to SI-8479, [SPARK-1093](https://issues.apache.org/jira/browse/SPARK-1093) introduced redundant [SparkContext constructors](https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/SparkContext.scala#L148-L181). However, [SI-8479](https://issues.scala-lang.org/browse/SI-8479) is already fixed in Scala 2.10.5 and Scala 2.11.1.

The real reason to provide these constructors is so that Java code can access `SparkContext` directly; this is Scala behavior covered by SI-4278. So, this PR adds an explicit test suite, `JavaSparkContextSuite`, to prevent future regression, and fixes the outdated comment, too.

## How was this patch tested?

Pass the Jenkins with a new test suite.

Author: Dongjoon Hyun <do...@apache.org>

Closes #18778 from dongjoon-hyun/SPARK-21578.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/14e75758
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/14e75758
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/14e75758

Branch: refs/heads/master
Commit: 14e75758ab1ea19da377d30ccc2297b7082af651
Parents: 4cc704b
Author: Dongjoon Hyun <do...@apache.org>
Authored: Tue Aug 1 21:59:02 2017 -0700
Committer: gatorsmile <ga...@gmail.com>
Committed: Tue Aug 1 21:59:02 2017 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/SparkContext.scala   |  5 +-
 .../org/apache/spark/JavaSparkContextSuite.java | 62 ++++++++++++++++++++
 2 files changed, 64 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/14e75758/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index e1ce66a..5316468 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -145,9 +145,8 @@ class SparkContext(config: SparkConf) extends Logging {
     this(SparkContext.updatedConf(new SparkConf(), master, appName, sparkHome, jars, environment))
   }
 
-  // NOTE: The below constructors could be consolidated using default arguments. Due to
-  // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
-  // Until we have a good workaround for that bug the constructors remain broken out.
+  // The following constructors are required when Java code accesses SparkContext directly.
+  // Please see SI-4278
 
   /**
    * Alternative constructor that allows setting common Spark properties directly

http://git-wip-us.apache.org/repos/asf/spark/blob/14e75758/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java b/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java
new file mode 100644
index 0000000..7e9cc70
--- /dev/null
+++ b/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package test.org.apache.spark;
+
+import java.io.*;
+
+import scala.collection.immutable.List;
+import scala.collection.immutable.List$;
+import scala.collection.immutable.Map;
+import scala.collection.immutable.Map$;
+
+import org.junit.Test;
+
+import org.apache.spark.api.java.*;
+import org.apache.spark.*;
+
+/**
+ * Java apps can use both the Java-friendly JavaSparkContext and the Scala SparkContext.
+ */
+public class JavaSparkContextSuite implements Serializable {
+
+  @Test
+  public void javaSparkContext() {
+    String[] jars = new String[] {};
+    java.util.Map<String, String> environment = new java.util.HashMap<>();
+
+    new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
+    new JavaSparkContext("local", "name", new SparkConf()).stop();
+    new JavaSparkContext("local", "name").stop();
+    new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
+    new JavaSparkContext("local", "name", "sparkHome", jars).stop();
+    new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
+  }
+
+  @Test
+  public void scalaSparkContext() {
+    List<String> jars = List$.MODULE$.empty();
+    Map<String, String> environment = Map$.MODULE$.empty();
+
+    new SparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
+    new SparkContext("local", "name", new SparkConf()).stop();
+    new SparkContext("local", "name").stop();
+    new SparkContext("local", "name", "sparkHome").stop();
+    new SparkContext("local", "name", "sparkHome", jars).stop();
+    new SparkContext("local", "name", "sparkHome", jars, environment).stop();
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org