Posted to commits@spark.apache.org by ma...@apache.org on 2014/07/26 19:27:18 UTC

git commit: [SPARK-2279] Added emptyRDD method to Java API

Repository: spark
Updated Branches:
  refs/heads/master 75663b57f -> c183b92c3


[SPARK-2279] Added emptyRDD method to Java API

Added an emptyRDD method to the Java API, with tests.
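
For context, a minimal usage sketch of the new method from Java (the context setup, master setting, and names below are illustrative and not part of this commit):

    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    // Build a local context, then create an RDD with no partitions or elements
    JavaSparkContext sc = new JavaSparkContext("local", "emptyRDDExample");
    JavaRDD<String> empty = sc.emptyRDD();
    System.out.println(empty.count());  // prints 0
    sc.stop();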

Author: bpaulin <bo...@bobpaulin.com>

Closes #1597 from bobpaulin/SPARK-2279 and squashes the following commits:

5ad57c2 [bpaulin] [SPARK-2279] Added emptyRDD method to Java API


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c183b92c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c183b92c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c183b92c

Branch: refs/heads/master
Commit: c183b92c3c70ad2d36a2d60bdb10c02b65bc0212
Parents: 75663b5
Author: bpaulin <bo...@bobpaulin.com>
Authored: Sat Jul 26 10:27:09 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sat Jul 26 10:27:09 2014 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/api/java/JavaSparkContext.scala  | 9 ++++++++-
 core/src/test/java/org/apache/spark/JavaAPISuite.java       | 9 +++++++--
 2 files changed, 15 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c183b92c/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index a678355..8a5f808 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -34,7 +34,7 @@ import org.apache.spark._
 import org.apache.spark.SparkContext.{DoubleAccumulatorParam, IntAccumulatorParam}
 import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.{EmptyRDD, RDD}
 
 /**
  * A Java-friendly version of [[org.apache.spark.SparkContext]] that returns
@@ -135,6 +135,13 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
     sc.parallelize(JavaConversions.asScalaBuffer(list), numSlices)
   }
 
+  /** Get an RDD that has no partitions or elements. */
+  def emptyRDD[T]: JavaRDD[T] = {
+    implicit val ctag: ClassTag[T] = fakeClassTag
+    JavaRDD.fromRDD(new EmptyRDD[T](sc))
+  }
+
+
   /** Distribute a local Scala collection to form an RDD. */
   def parallelize[T](list: java.util.List[T]): JavaRDD[T] =
     parallelize(list, sc.defaultParallelism)

http://git-wip-us.apache.org/repos/asf/spark/blob/c183b92c/core/src/test/java/org/apache/spark/JavaAPISuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index b2868b5..f882a86 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -118,8 +118,7 @@ public class JavaAPISuite implements Serializable {
     JavaRDD<Integer> intersections = s1.intersection(s2);
     Assert.assertEquals(3, intersections.count());
 
-    List<Integer> list = new ArrayList<Integer>();
-    JavaRDD<Integer> empty = sc.parallelize(list);
+    JavaRDD<Integer> empty = sc.emptyRDD();
     JavaRDD<Integer> emptyIntersection = empty.intersection(s2);
     Assert.assertEquals(0, emptyIntersection.count());
 
@@ -185,6 +184,12 @@ public class JavaAPISuite implements Serializable {
   }
 
   @Test
+  public void emptyRDD() {
+    JavaRDD<String> rdd = sc.emptyRDD();
+    Assert.assertEquals("Empty RDD shouldn't have any values", 0, rdd.count());
+  }
+
+  @Test
   public void sortBy() {
     List<Tuple2<Integer, Integer>> pairs = new ArrayList<Tuple2<Integer, Integer>>();
     pairs.add(new Tuple2<Integer, Integer>(0, 4));