You are viewing a plain text version of this content; the canonical hyperlink was not preserved in this export.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/21 09:09:58 UTC

[09/10] git commit: Style clean-up

Style clean-up


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/a9bcc980
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/a9bcc980
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/a9bcc980

Branch: refs/heads/master
Commit: a9bcc980b693bf5b0959caccf74367fc70348041
Parents: a917a87
Author: Patrick Wendell <pw...@gmail.com>
Authored: Mon Jan 20 23:42:24 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Tue Jan 21 00:05:28 2014 -0800

----------------------------------------------------------------------
 .../org/apache/spark/storage/ShuffleBlockManager.scala   |  9 ++++++---
 .../spark/util/collection/ExternalAppendOnlyMap.scala    | 11 +++--------
 2 files changed, 9 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/a9bcc980/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
index 173c329..bb07c8c 100644
--- a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
@@ -109,9 +109,12 @@ class ShuffleBlockManager(blockManager: BlockManager) extends Logging {
           val blockFile = blockManager.diskBlockManager.getFile(blockId)
           // Because of previous failures, the shuffle file may already exist on this machine.
           // If so, remove it.
-          if (blockFile.exists()) {
-            val removed = blockFile.delete()
-            logInfo(s"Removed existing shuffle file $blockFile successfully: $removed")
+          if (blockFile.exists) {
+            if (blockFile.delete()) {
+              logInfo(s"Removed existing shuffle file $blockFile")
+            } else {
+              logWarning(s"Failed to remove existing shuffle file $blockFile")
+            }
           }
           blockManager.getDiskWriter(blockId, blockFile, serializer, bufferSize)
         }

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/a9bcc980/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 792f29d..fb73636 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -173,15 +173,10 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      * most likely require using the file channel API.
      */
 
-    val codec = new LZFCompressionCodec(sparkConf)
-
+    val shouldCompress = blockManager.shouldCompress(blockId)
+    val compressionCodec = new LZFCompressionCodec(sparkConf)
     def wrapForCompression(outputStream: OutputStream) = {
-      blockManager.shouldCompress(blockId) match {
-        case true =>
-          codec.compressedOutputStream(outputStream)
-        case false =>
-          outputStream
-      }
+      if (shouldCompress) compressionCodec.compressedOutputStream(outputStream) else outputStream
     }
 
     def getNewWriter = new DiskBlockObjectWriter(blockId, file, serializer, fileBufferSize,