Posted to commits@spark.apache.org by pw...@apache.org on 2014/03/07 02:57:48 UTC

git commit: Small clean-up to flatmap tests

Repository: spark
Updated Branches:
  refs/heads/master 9ae919c02 -> 33baf14b0


Small clean-up to flatmap tests
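
The clean-up switches the tests from a pseudo-distributed
local-cluster[1,1,512] master (one worker, one core, 512 MB) to the
in-process local master, and drops the debug println(persisted.count())
calls in favor of the existing assertions. A minimal sketch of the new
configuration, assuming only the 0.9-era SparkConf API visible in the
diff below:

    import org.apache.spark.{SparkConf, SparkContext}

    // "local" runs the scheduler and executor inside the test JVM, so
    // the suite no longer has to fork a separate worker process the way
    // "local-cluster[1,1,512]" (1 worker, 1 core, 512 MB) would.
    val sconf = new SparkConf()
      .setMaster("local")
      .setAppName("iterator_to_disk_test")
    val sc = new SparkContext(sconf)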


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/33baf14b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/33baf14b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/33baf14b

Branch: refs/heads/master
Commit: 33baf14b04bcb5cb8dc39ae0773b9e0ef79ef9cf
Parents: 9ae919c
Author: Patrick Wendell <pw...@gmail.com>
Authored: Thu Mar 6 17:57:31 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Thu Mar 6 17:57:31 2014 -0800

----------------------------------------------------------------------
 .../org/apache/spark/storage/FlatmapIteratorSuite.scala  | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/33baf14b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
index b843b4c..bcf138b 100644
--- a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
@@ -33,34 +33,29 @@ class FlatmapIteratorSuite extends FunSuite with LocalSparkContext {
    * info from the serializer, and allow old objects to be GC'd
    */
   test("Flatmap Iterator to Disk") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap( x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.DISK_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }
 
   test("Flatmap Iterator to Memory") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap(x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.MEMORY_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }
 
   test("Serializer Reset") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("serializer_reset_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("serializer_reset_test")
       .set("spark.serializer.objectStreamReset", "10")
     sc = new SparkContext(sconf)
     val expand_size = 500
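
For context, the pattern these tests exercise, per the doc comment above
("info from the serializer, and allow old objects to be GC'd"): each input
element is flatMapped into a lazy Stream, and spark.serializer.objectStreamReset
bounds how many objects the serializer tracks before resetting, so already
written elements become eligible for garbage collection. A self-contained
sketch using only the APIs that appear in the hunks (Spark 0.9-era; the
object name and app name are illustrative, and the 500/5 expected counts
mirror the assertions above):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.storage.StorageLevel

    // Illustrative stand-alone version of the persisted-flatMap pattern.
    object FlatmapIteratorSketch {
      def main(args: Array[String]): Unit = {
        val sconf = new SparkConf()
          .setMaster("local")
          .setAppName("serializer_reset_sketch")
          // Reset the serializer's identity table every 10 objects so
          // references to already-serialized elements can be GC'd.
          .set("spark.serializer.objectStreamReset", "10")
        val sc = new SparkContext(sconf)
        val expand_size = 100
        // Five inputs, each lazily expanded into 100 elements.
        val data = sc.parallelize(1 to 5).flatMap(x => Stream.range(0, expand_size))
        val persisted = data.persist(StorageLevel.DISK_ONLY)
        assert(persisted.count() == 500)
        assert(persisted.filter(_ == 1).count() == 5)
        sc.stop()
      }
    }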