Posted to commits@spark.apache.org by rx...@apache.org on 2015/05/30 07:19:18 UTC

spark git commit: [SPARK-7957] Preserve partitioning when using randomSplit

Repository: spark
Updated Branches:
  refs/heads/master 3792d2583 -> 7ed06c399


[SPARK-7957] Preserve partitioning when using randomSplit

cc JoshRosen
Thanks for noticing this!

Author: Burak Yavuz <br...@gmail.com>

Closes #6509 from brkyvz/sample-perf-reg and squashes the following commits:

497465d [Burak Yavuz] addressed code review
293f95f [Burak Yavuz] [SPARK-7957] Preserve partitioning when using randomSplit


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7ed06c39
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7ed06c39
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7ed06c39

Branch: refs/heads/master
Commit: 7ed06c39922ac90acab3a78ce0f2f21184ed68a5
Parents: 3792d25
Author: Burak Yavuz <br...@gmail.com>
Authored: Fri May 29 22:19:15 2015 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Fri May 29 22:19:15 2015 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/rdd/RDD.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7ed06c39/core/src/main/scala/org/apache/spark/rdd/RDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 5fcef25..10610f4 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -434,11 +434,11 @@ abstract class RDD[T: ClassTag](
    * @return A random sub-sample of the RDD without replacement.
    */
   private[spark] def randomSampleWithRange(lb: Double, ub: Double, seed: Long): RDD[T] = {
-    this.mapPartitionsWithIndex { case (index, partition) =>
+    this.mapPartitionsWithIndex( { (index, partition) =>
       val sampler = new BernoulliCellSampler[T](lb, ub)
       sampler.setSeed(seed + index)
       sampler.sample(partition)
-    }
+    }, preservesPartitioning = true)
   }
 
   /**

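For context, the effect of passing preservesPartitioning = true here is that the sampled RDD keeps its parent's partitioner, so downstream key-based operations on the splits can avoid a re-shuffle. The following is a minimal sketch (not part of the commit) that could be run in spark-shell; it assumes a live SparkContext named sc, and the value names (pairs, train, test) are illustrative only.

    import org.apache.spark.HashPartitioner

    // Hypothetical pair RDD with an explicit hash partitioner.
    val pairs = sc.parallelize(1 to 1000)
      .map(x => (x % 10, x))
      .partitionBy(new HashPartitioner(10))

    // randomSplit delegates to randomSampleWithRange; with
    // preservesPartitioning = true each split retains the parent's
    // partitioner instead of dropping it.
    val Array(train, test) = pairs.randomSplit(Array(0.8, 0.2), 42L)

    // Expected to hold after this change: Some(HashPartitioner(10))
    assert(train.partitioner == pairs.partitioner)

Because the splits still carry the HashPartitioner, a subsequent join or reduceByKey against another RDD partitioned the same way should not trigger an extra shuffle of the split data.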

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org