Posted to commits@spark.apache.org by an...@apache.org on 2015/05/08 23:13:01 UTC

spark git commit: [SPARK-7378] [CORE] Handle deep links to unloaded apps.

Repository: spark
Updated Branches:
  refs/heads/master 9042f8f37 -> 5467c34c3


[SPARK-7378] [CORE] Handle deep links to unloaded apps.

The code was treating deep links as if they were attempt IDs, so
loading "/history/app1/jobs" directly would fail because "jobs"
was interpreted as an attempt ID.

This change tries both cases - first without an attempt ID, then
with one - so that deep links are handled correctly. It assumes
that links in the Spark UI do not clash with the attempt ID
namespace; that holds for YARN, currently the only backend that
publishes attempt IDs.
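
For illustration, here is the lookup order described above as a
minimal, self-contained Scala sketch; the cache, names, and paths
are stand-ins, not the actual HistoryServer internals:

  object DeepLinkLookup {
    // Stand-in for the app cache; keys are "appId" or "appId/attemptId".
    private val appCache = Set("app1", "app2/attempt1")

    // Mirrors the new loadAppUi: true if the cache has a UI under this key.
    private def loadAppUi(appId: String, attemptId: Option[String]): Boolean =
      appCache.contains(appId + attemptId.map(id => s"/$id").getOrElse(""))

    // pathInfo is the part of the URL after "/history", e.g. "/app1/jobs".
    def resolve(pathInfo: String): Boolean = {
      val parts = pathInfo.split("/")
      val appId = parts(1)
      val attemptId = if (parts.length >= 3) Some(parts(2)) else None
      // Single-attempt route first; treat parts(2) as an attempt ID only
      // if the first lookup misses.
      loadAppUi(appId, None) || attemptId.exists(id => loadAppUi(appId, Some(id)))
    }

    def main(args: Array[String]): Unit = {
      println(resolve("/app1/jobs"))      // true: "jobs" is a deep link
      println(resolve("/app2/attempt1"))  // true: attempt-ID route
      println(resolve("/app3/jobs"))      // false: not found either way
    }
  }

The actual change wires this order into doGet (see the diff below)
and falls through to a 404 page when both lookups miss.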

Author: Marcelo Vanzin <va...@cloudera.com>

Closes #5922 from vanzin/SPARK-7378 and squashes the following commits:

96f648b [Marcelo Vanzin] Fix comparison.
ed3bcd4 [Marcelo Vanzin] Merge branch 'master' into SPARK-7378
23483e4 [Marcelo Vanzin] Fat fingers.
b728f08 [Marcelo Vanzin] [SPARK-7378] [core] Handle deep links to unloaded apps.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5467c34c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5467c34c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5467c34c

Branch: refs/heads/master
Commit: 5467c34c3d6538e053957b5513df218f1f5bae6b
Parents: 9042f8f
Author: Marcelo Vanzin <va...@cloudera.com>
Authored: Fri May 8 14:12:58 2015 -0700
Committer: Andrew Or <an...@databricks.com>
Committed: Fri May 8 14:12:58 2015 -0700

----------------------------------------------------------------------
 .../spark/deploy/history/HistoryServer.scala    | 48 ++++++++++++--------
 1 file changed, 29 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5467c34c/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index fc5182d..517cbe5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -83,31 +83,27 @@ class HistoryServer(
         return
       }
 
-      val appKey =
-        if (parts.length == 3) {
-          s"${parts(1)}/${parts(2)}"
-        } else {
-          parts(1)
+      val appId = parts(1)
+      val attemptId = if (parts.length >= 3) Some(parts(2)) else None
+
+      // Since we may have applications with multiple attempts mixed with applications with a
+      // single attempt, we need to try both. Try the single-attempt route first, and if an
+      // error is raised, then try the multiple attempt route.
+      if (!loadAppUi(appId, None) && (!attemptId.isDefined || !loadAppUi(appId, attemptId))) {
+        val msg = <div class="row-fluid">Application {appId} not found.</div>
+        res.setStatus(HttpServletResponse.SC_NOT_FOUND)
+        UIUtils.basicSparkPage(msg, "Not Found").foreach { n =>
+          res.getWriter().write(n.toString)
         }
+        return
+      }
 
       // Note we don't use the UI retrieved from the cache; the cache loader above will register
       // the app's UI, and all we need to do is redirect the user to the same URI that was
       // requested, and the proper data should be served at that point.
-      try {
-        appCache.get(appKey)
-        res.sendRedirect(res.encodeRedirectURL(req.getRequestURI()))
-      } catch {
-        case e: Exception => e.getCause() match {
-          case nsee: NoSuchElementException =>
-            val msg = <div class="row-fluid">Application {appKey} not found.</div>
-            res.setStatus(HttpServletResponse.SC_NOT_FOUND)
-            UIUtils.basicSparkPage(msg, "Not Found").foreach(
-              n => res.getWriter().write(n.toString))
-
-          case cause: Exception => throw cause
-        }
-      }
+      res.sendRedirect(res.encodeRedirectURL(req.getRequestURI()))
     }
+
     // SPARK-5983 ensure TRACE is not supported
     protected override def doTrace(req: HttpServletRequest, res: HttpServletResponse): Unit = {
       res.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED)
@@ -183,6 +179,20 @@ class HistoryServer(
    */
   def getProviderConfig(): Map[String, String] = provider.getConfig()
 
+  private def loadAppUi(appId: String, attemptId: Option[String]): Boolean = {
+    try {
+      appCache.get(appId + attemptId.map { id => s"/$id" }.getOrElse(""))
+      true
+    } catch {
+      case e: Exception => e.getCause() match {
+        case nsee: NoSuchElementException =>
+          false
+
+        case cause: Exception => throw cause
+      }
+    }
+  }
+
 }
 
 /**
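
One detail in doGet worth spelling out: the appCache lookup has the
side effect of registering the app's UI with the server, so after a
successful load the handler only needs to redirect the browser back
to the same URI, where the freshly attached UI serves the request.
A sketch of that redirect-after-load pattern, using the servlet API
but with a stand-in load function:

  import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

  // Sketch only: a successful load(path) is assumed to register a
  // handler for that path as a side effect, so we re-issue the same
  // request via a redirect rather than serving the UI directly.
  class RedirectAfterLoadServlet(load: String => Boolean) extends HttpServlet {
    protected override def doGet(req: HttpServletRequest, res: HttpServletResponse): Unit = {
      if (load(req.getPathInfo())) {
        res.sendRedirect(res.encodeRedirectURL(req.getRequestURI()))
      } else {
        res.sendError(HttpServletResponse.SC_NOT_FOUND)
      }
    }
  }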

