You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by va...@apache.org on 2019/07/31 20:25:04 UTC
[spark] branch master updated: [SPARK-28564][CORE] Access history
application defaults to the last attempt id
This is an automated email from the ASF dual-hosted git repository.
vanzin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 70ef906 [SPARK-28564][CORE] Access history application defaults to the last attempt id
70ef906 is described below
commit 70ef9064a8aa605b09e639d5a40528b063af25b7
Author: sychen <sy...@ctrip.com>
AuthorDate: Wed Jul 31 13:24:36 2019 -0700
[SPARK-28564][CORE] Access history application defaults to the last attempt id
## What changes were proposed in this pull request?
When we set ```spark.history.ui.maxApplications``` to a small value, we can't find some apps through the page search.
If the URL is constructed manually (http://localhost:18080/history/local-xxx), it can be accessed if the app has no attempt id.
But for an app with multiple attempts, such a URL cannot be accessed, and the page displays Not Found.
## How was this patch tested?
Add UT
Closes #25301 from cxzl25/hs_app_last_attempt_id.
Authored-by: sychen <sy...@ctrip.com>
Signed-off-by: Marcelo Vanzin <va...@cloudera.com>
---
.../spark/deploy/history/HistoryServer.scala | 22 +++++++++--
.../spark/deploy/history/HistoryServerSuite.scala | 43 ++++++++++++++++++++++
2 files changed, 62 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index 7df36c5..878f0cb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -79,7 +79,18 @@ class HistoryServer(
}
val appId = parts(1)
- val attemptId = if (parts.length >= 3) Some(parts(2)) else None
+ var shouldAppendAttemptId = false
+ val attemptId = if (parts.length >= 3) {
+ Some(parts(2))
+ } else {
+ val lastAttemptId = provider.getApplicationInfo(appId).flatMap(_.attempts.head.attemptId)
+ if (lastAttemptId.isDefined) {
+ shouldAppendAttemptId = true
+ lastAttemptId
+ } else {
+ None
+ }
+ }
// Since we may have applications with multiple attempts mixed with applications with a
// single attempt, we need to try both. Try the single-attempt route first, and if an
@@ -97,8 +108,13 @@ class HistoryServer(
// the app's UI, and all we need to do is redirect the user to the same URI that was
// requested, and the proper data should be served at that point.
// Also, make sure that the redirect url contains the query string present in the request.
- val requestURI = req.getRequestURI + Option(req.getQueryString).map("?" + _).getOrElse("")
- res.sendRedirect(res.encodeRedirectURL(requestURI))
+ val redirect = if (shouldAppendAttemptId) {
+ req.getRequestURI.stripSuffix("/") + "/" + attemptId.get
+ } else {
+ req.getRequestURI
+ }
+ val query = Option(req.getQueryString).map("?" + _).getOrElse("")
+ res.sendRedirect(res.encodeRedirectURL(redirect + query))
}
// SPARK-5983 ensure TRACE is not supported
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index c12b71a..8548d39 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -632,6 +632,49 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
}
}
+ test("access history application defaults to the last attempt id") {
+
+ def getRedirectUrl(url: URL): (Int, String) = {
+ val connection = url.openConnection().asInstanceOf[HttpURLConnection]
+ connection.setRequestMethod("GET")
+ connection.setUseCaches(false)
+ connection.setDefaultUseCaches(false)
+ connection.setInstanceFollowRedirects(false)
+ connection.connect()
+ val code = connection.getResponseCode()
+ val location = connection.getHeaderField("Location")
+ (code, location)
+ }
+
+ def buildPageAttemptUrl(appId: String, attemptId: Option[Int]): URL = {
+ attemptId match {
+ case Some(id) =>
+ new URL(s"http://localhost:$port/history/$appId/$id")
+ case None =>
+ new URL(s"http://localhost:$port/history/$appId")
+ }
+ }
+
+ val oneAttemptAppId = "local-1430917381534"
+ HistoryServerSuite.getUrl(buildPageAttemptUrl(oneAttemptAppId, None))
+
+ val multiAttemptAppid = "local-1430917381535"
+ val lastAttemptId = Some(2)
+ val lastAttemptUrl = buildPageAttemptUrl(multiAttemptAppid, lastAttemptId)
+ Seq(None, Some(1), Some(2)).foreach { attemptId =>
+ val url = buildPageAttemptUrl(multiAttemptAppid, attemptId)
+ val (code, location) = getRedirectUrl(url)
+ assert(code === 302, s"Unexpected status code $code for $url")
+ attemptId match {
+ case None =>
+ assert(location.stripSuffix("/") === lastAttemptUrl.toString)
+ case _ =>
+ assert(location.stripSuffix("/") === url.toString)
+ }
+ HistoryServerSuite.getUrl(new URL(location))
+ }
+ }
+
def getContentAndCode(path: String, port: Int = port): (Int, Option[String], Option[String]) = {
HistoryServerSuite.getContentAndCode(new URL(s"http://localhost:$port/api/v1/$path"))
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org