Posted to commits@spark.apache.org by ma...@apache.org on 2014/07/27 07:44:35 UTC

git commit: SPARK-2680: Lower spark.shuffle.memoryFraction to 0.2 by default

Repository: spark
Updated Branches:
  refs/heads/master ba46bbed5 -> b547f69bd


SPARK-2680: Lower spark.shuffle.memoryFraction to 0.2 by default

Author: Matei Zaharia <ma...@databricks.com>

Closes #1593 from mateiz/spark-2680 and squashes the following commits:

3c949c4 [Matei Zaharia] Lower spark.shuffle.memoryFraction to 0.2 by default
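
Jobs whose shuffles were tuned against the previous default can restore the old value explicitly. A minimal sketch, assuming a standalone application (the config key and old value come from this commit; the app name is illustrative):

    import org.apache.spark.{SparkConf, SparkContext}

    // Restore the pre-SPARK-2680 default of 0.3 for this application only.
    val conf = new SparkConf()
      .setAppName("shuffle-heavy-job")                 // illustrative app name
      .set("spark.shuffle.memoryFraction", "0.3")      // old default, now 0.2
    val sc = new SparkContext(conf)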


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b547f69b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b547f69b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b547f69b

Branch: refs/heads/master
Commit: b547f69bdb5f4a6d5f471a2d998c2df6fb2a9347
Parents: ba46bbe
Author: Matei Zaharia <ma...@databricks.com>
Authored: Sat Jul 26 22:44:17 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sat Jul 26 22:44:17 2014 -0700

----------------------------------------------------------------------
 .../org/apache/spark/util/collection/ExternalAppendOnlyMap.scala   | 2 +-
 docs/configuration.md                                              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b547f69b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index be8f652..c22bb8d 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -74,7 +74,7 @@ class ExternalAppendOnlyMap[K, V, C](
 
   // Collective memory threshold shared across all running tasks
   private val maxMemoryThreshold = {
-    val memoryFraction = sparkConf.getDouble("spark.shuffle.memoryFraction", 0.3)
+    val memoryFraction = sparkConf.getDouble("spark.shuffle.memoryFraction", 0.2)
     val safetyFraction = sparkConf.getDouble("spark.shuffle.safetyFraction", 0.8)
     (Runtime.getRuntime.maxMemory * memoryFraction * safetyFraction).toLong
   }
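
For a concrete sense of the change, here is the same threshold arithmetic spelled out with an assumed 8 GiB executor heap (the heap size is hypothetical; the fractions are the defaults from the patched code above):

    // Illustrative only: mirrors maxMemoryThreshold with an assumed heap size.
    val maxMemory      = 8L * 1024 * 1024 * 1024   // stand-in for Runtime.getRuntime.maxMemory
    val memoryFraction = 0.2                       // new default (was 0.3)
    val safetyFraction = 0.8                       // unchanged default
    val maxMemoryThreshold = (maxMemory * memoryFraction * safetyFraction).toLong
    // roughly 1.28 GiB with the new default, versus roughly 1.92 GiB with the old 0.3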

http://git-wip-us.apache.org/repos/asf/spark/blob/b547f69b/docs/configuration.md
----------------------------------------------------------------------
diff --git a/docs/configuration.md b/docs/configuration.md
index 4e4b781..46e3dd9 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -239,7 +239,7 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td><code>spark.shuffle.memoryFraction</code></td>
-  <td>0.3</td>
+  <td>0.2</td>
   <td>
     Fraction of Java heap to use for aggregation and cogroups during shuffles, if
     <code>spark.shuffle.spill</code> is true. At any given time, the collective size of