You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2013/11/18 03:53:23 UTC

[1/2] git commit: Merge pull request #166 from ahirreddy/simr-spark-ui

Updated Branches:
  refs/heads/branch-0.8 e134ed5d6 -> 37126e8c9


Merge pull request #166 from ahirreddy/simr-spark-ui

SIMR Backend Scheduler will now write the Spark UI URL to HDFS, which is to be retrieved by SIMR clients.

(cherry picked from commit 39af914b273e35ff431844951ee8dfadcbc0c400)
Signed-off-by: Reynold Xin <rx...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/88230579
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/88230579
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/88230579

Branch: refs/heads/branch-0.8
Commit: 8823057992b5bbad760ee594da6f44457b63991f
Parents: e134ed5
Author: Matei Zaharia <ma...@eecs.berkeley.edu>
Authored: Wed Nov 13 08:39:05 2013 -0800
Committer: Reynold Xin <rx...@apache.org>
Committed: Sun Nov 17 18:53:04 2013 -0800

----------------------------------------------------------------------
 .../spark/scheduler/cluster/SimrSchedulerBackend.scala  | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/88230579/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
index d78bdba..6b91935 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
@@ -31,6 +31,10 @@ private[spark] class SimrSchedulerBackend(
   val tmpPath = new Path(driverFilePath + "_tmp")
   val filePath = new Path(driverFilePath)
 
+  val uiFilePath = driverFilePath + "_ui"
+  val tmpUiPath = new Path(uiFilePath + "_tmp")
+  val uiPath = new Path(uiFilePath)
+
   val maxCores = System.getProperty("spark.simr.executor.cores", "1").toInt
 
   override def start() {
@@ -45,6 +49,8 @@ private[spark] class SimrSchedulerBackend(
 
     logInfo("Writing to HDFS file: "  + driverFilePath)
     logInfo("Writing Akka address: "  + driverUrl)
+    logInfo("Writing to HDFS file: "  + uiFilePath)
+    logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
 
     // Create temporary file to prevent race condition where executors get empty driverUrl file
     val temp = fs.create(tmpPath, true)
@@ -54,6 +60,12 @@ private[spark] class SimrSchedulerBackend(
 
     // "Atomic" rename
     fs.rename(tmpPath, filePath)
+
+    // Write Spark UI Address to file
+    val uiTemp = fs.create(tmpUiPath, true)
+    uiTemp.writeUTF(sc.ui.appUIAddress)
+    uiTemp.close()
+    fs.rename(tmpUiPath, uiPath)
   }
 
   override def stop() {


[2/2] git commit: Merge pull request #174 from ahirreddy/master

Posted by rx...@apache.org.
Merge pull request #174 from ahirreddy/master

Write Spark UI url to driver file on HDFS

This makes the SIMR code path simpler

(cherry picked from commit ed25105fd9733acd631dab0993560ac66ffeae16)
Signed-off-by: Reynold Xin <rx...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/37126e8c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/37126e8c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/37126e8c

Branch: refs/heads/branch-0.8
Commit: 37126e8c9013fc0f060a31bc8ffcaa1b2cbc9999
Parents: 8823057
Author: Matei Zaharia <ma...@eecs.berkeley.edu>
Authored: Thu Nov 14 19:43:55 2013 -0800
Committer: Reynold Xin <rx...@apache.org>
Committed: Sun Nov 17 18:53:16 2013 -0800

----------------------------------------------------------------------
 .../spark/scheduler/cluster/SimrSchedulerBackend.scala  | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/37126e8c/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
index 6b91935..0ea35e2 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala
@@ -31,10 +31,6 @@ private[spark] class SimrSchedulerBackend(
   val tmpPath = new Path(driverFilePath + "_tmp")
   val filePath = new Path(driverFilePath)
 
-  val uiFilePath = driverFilePath + "_ui"
-  val tmpUiPath = new Path(uiFilePath + "_tmp")
-  val uiPath = new Path(uiFilePath)
-
   val maxCores = System.getProperty("spark.simr.executor.cores", "1").toInt
 
   override def start() {
@@ -49,23 +45,17 @@ private[spark] class SimrSchedulerBackend(
 
     logInfo("Writing to HDFS file: "  + driverFilePath)
     logInfo("Writing Akka address: "  + driverUrl)
-    logInfo("Writing to HDFS file: "  + uiFilePath)
     logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
 
     // Create temporary file to prevent race condition where executors get empty driverUrl file
     val temp = fs.create(tmpPath, true)
     temp.writeUTF(driverUrl)
     temp.writeInt(maxCores)
+    temp.writeUTF(sc.ui.appUIAddress)
     temp.close()
 
     // "Atomic" rename
     fs.rename(tmpPath, filePath)
-
-    // Write Spark UI Address to file
-    val uiTemp = fs.create(tmpUiPath, true)
-    uiTemp.writeUTF(sc.ui.appUIAddress)
-    uiTemp.close()
-    fs.rename(tmpUiPath, uiPath)
   }
 
   override def stop() {