Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/14 07:32:48 UTC

[6/7] git commit: Wording changes per Patrick

Wording changes per Patrick


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/83993414
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/83993414
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/83993414

Branch: refs/heads/master
Commit: 839934140f1a518acae8c60fe82c2253f911ea33
Parents: a1f0992
Author: Andrew Or <an...@gmail.com>
Authored: Mon Jan 13 20:51:38 2014 -0800
Committer: Andrew Or <an...@gmail.com>
Committed: Mon Jan 13 20:51:38 2014 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/ui/jobs/ExecutorTable.scala    |  4 ++--
 .../main/scala/org/apache/spark/ui/jobs/StagePage.scala   | 10 +++++-----
 2 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
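
For context, the stage page renders these renamed spill metrics as human-readable sizes via Utils.bytesToString (visible in the StagePage.scala hunk below). The standalone Scala sketch that follows only approximates that rendering so the new labels can be seen end to end; formatBytes is an illustrative stand-in, not Spark's actual Utils.bytesToString, and spillSummary is a hypothetical helper, not part of this commit.

// Illustrative sketch only: pairs the renamed UI labels with formatted
// byte counts, roughly mirroring what the stage page displays.
object ShuffleSpillLabels {

  // Stand-in for Utils.bytesToString: convert a raw byte count to a readable string.
  def formatBytes(size: Long): String = {
    val KB = 1L << 10
    val MB = 1L << 20
    val GB = 1L << 30
    if (size >= GB) f"${size.toDouble / GB}%.1f GB"
    else if (size >= MB) f"${size.toDouble / MB}%.1f MB"
    else if (size >= KB) f"${size.toDouble / KB}%.1f KB"
    else s"$size B"
  }

  // Pair the new labels from this commit with formatted spill values.
  def spillSummary(memoryBytesSpilled: Long, diskBytesSpilled: Long): Seq[(String, String)] = Seq(
    "Shuffle spill (memory)" -> formatBytes(memoryBytesSpilled),
    "Shuffle spill (disk)"   -> formatBytes(diskBytesSpilled)
  )

  def main(args: Array[String]): Unit = {
    // Example: 512 MB spilled in memory, 2 GB spilled to disk.
    spillSummary(512L << 20, 2L << 30).foreach { case (label, value) =>
      println(s"$label: $value")
    }
  }
}

Running the sketch prints "Shuffle spill (memory): 512.0 MB" and "Shuffle spill (disk): 2.0 GB", matching the wording introduced in the diff below.
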


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/83993414/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
index 7b73253..ab03eb5 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
@@ -48,8 +48,8 @@ private[spark] class ExecutorTable(val parent: JobProgressUI, val stageId: Int)
         <th>Succeeded Tasks</th>
         <th>Shuffle Read</th>
         <th>Shuffle Write</th>
-        <th>Bytes Spilled (Memory)</th>
-        <th>Bytes Spilled (Disk)</th>
+        <th>Shuffle Spill (Memory)</th>
+        <th>Shuffle Spill (Disk)</th>
       </thead>
       <tbody>
         {createExecutorTable()}

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/83993414/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index 8f89fad..113f76b 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -86,11 +86,11 @@ private[spark] class StagePage(parent: JobProgressUI) {
             }
             {if (hasBytesSpilled)
             <li>
-              <strong>Bytes spilled (memory): </strong>
+              <strong>Shuffle spill (memory): </strong>
               {Utils.bytesToString(memoryBytesSpilled)}
             </li>
             <li>
-              <strong>Bytes spilled (disk): </strong>
+              <strong>Shuffle spill (disk): </strong>
               {Utils.bytesToString(diskBytesSpilled)}
             </li>
             }
@@ -102,7 +102,7 @@ private[spark] class StagePage(parent: JobProgressUI) {
         Seq("Duration", "GC Time", "Result Ser Time") ++
         {if (hasShuffleRead) Seq("Shuffle Read")  else Nil} ++
         {if (hasShuffleWrite) Seq("Write Time", "Shuffle Write") else Nil} ++
-        {if (hasBytesSpilled) Seq("Bytes Spilled (Memory)", "Bytes Spilled (Disk)") else Nil} ++
+        {if (hasBytesSpilled) Seq("Shuffle Spill (Memory)", "Shuffle Spill (Disk)") else Nil} ++
         Seq("Errors")
 
       val taskTable = listingTable(taskHeaders, taskRow(hasShuffleRead, hasShuffleWrite, hasBytesSpilled), tasks)
@@ -171,14 +171,14 @@ private[spark] class StagePage(parent: JobProgressUI) {
             case(info, metrics, exception) =>
               metrics.get.memoryBytesSpilled.toDouble
           }
-          val memoryBytesSpilledQuantiles = "Bytes spilled (memory)" +:
+          val memoryBytesSpilledQuantiles = "Shuffle spill (memory)" +:
             getQuantileCols(memoryBytesSpilledSizes)
 
           val diskBytesSpilledSizes = validTasks.map {
             case(info, metrics, exception) =>
               metrics.get.diskBytesSpilled.toDouble
           }
-          val diskBytesSpilledQuantiles = "Bytes spilled (disk)" +:
+          val diskBytesSpilledQuantiles = "Shuffle spill (disk)" +:
             getQuantileCols(diskBytesSpilledSizes)
 
           val listings: Seq[Seq[String]] = Seq(