Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2018/12/11 02:34:56 UTC

[GitHub] HyukjinKwon commented on a change in pull request #23260: [SPARK-26311][YARN] New feature: custom log URL for stdout/stderr

HyukjinKwon commented on a change in pull request #23260: [SPARK-26311][YARN] New feature: custom log URL for stdout/stderr
URL: https://github.com/apache/spark/pull/23260#discussion_r240455039
 
 

 ##########
 File path: resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
 ##########
 @@ -246,13 +246,56 @@ private[yarn] class ExecutorRunnable(
       sys.env.get("SPARK_USER").foreach { user =>
         val containerId = ConverterUtils.toString(c.getId)
         val address = c.getNodeHttpAddress
-        val baseUrl = s"$httpScheme$address/node/containerlogs/$containerId/$user"
 
-        env("SPARK_LOG_URL_STDERR") = s"$baseUrl/stderr?start=-4096"
-        env("SPARK_LOG_URL_STDOUT") = s"$baseUrl/stdout?start=-4096"
+        sparkConf.get(config.CUSTOM_LOG_URL) match {
+          case Some(customUrl) =>
+            val pathVariables = ExecutorRunnable.buildPathVariables(httpScheme, address,
+              YarnConfiguration.getClusterId(conf), containerId, user)
+            val envNameToFileNameMap = Map("SPARK_LOG_URL_STDERR" -> "stderr",
+              "SPARK_LOG_URL_STDOUT" -> "stdout")
+            val logUrls = ExecutorRunnable.replaceLogUrls(customUrl, pathVariables,
+              envNameToFileNameMap)
+
+            logUrls.foreach { case (envName, url) =>
+              env(envName) = url
+            }
+          case None =>
+            val baseUrl = s"$httpScheme$address/node/containerlogs/$containerId/$user"
+            env("SPARK_LOG_URL_STDERR") = s"$baseUrl/stderr?start=-4096"
+            env("SPARK_LOG_URL_STDOUT") = s"$baseUrl/stdout?start=-4096"
+        }
       }
     }
 
     env
   }
 }
+
+private[yarn] object ExecutorRunnable {
+  val LOG_URL_PATTERN_HTTP_SCHEME = "{{HttpScheme}}"
+  val LOG_URL_PATTERN_NODE_HTTP_ADDRESS = "{{NodeHttpAddress}}"
+  val LOG_URL_PATTERN_CLUSTER_ID = "{{ClusterId}}"
+  val LOG_URL_PATTERN_CONTAINER_ID = "{{ContainerId}}"
+  val LOG_URL_PATTERN_USER = "{{User}}"
+  val LOG_URL_PATTERN_FILE_NAME = "{{FileName}}"
+
+  def buildPathVariables(httpScheme: String, nodeHttpAddress: String, clusterId: String,
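
[Editorial note] The diff above is truncated at the reviewed line, so the bodies of buildPathVariables and replaceLogUrls (and the key behind config.CUSTOM_LOG_URL) are not shown. The following is a minimal, self-contained Scala sketch of how the {{...}} template substitution could work, assuming the path-variable map is keyed by the full placeholder strings and that replaceLogUrls produces one URL per environment variable; it is not the PR's actual implementation:

    // Editorial sketch only. Names mirror the constants and helpers referenced
    // in the diff above; return types and substitution strategy are assumptions.
    object LogUrlSketch {

      def buildPathVariables(
          httpScheme: String,
          nodeHttpAddress: String,
          clusterId: String,
          containerId: String,
          user: String): Map[String, String] = Map(
        "{{HttpScheme}}" -> httpScheme,
        "{{NodeHttpAddress}}" -> nodeHttpAddress,
        "{{ClusterId}}" -> clusterId,
        "{{ContainerId}}" -> containerId,
        "{{User}}" -> user)

      // For each env var (e.g. SPARK_LOG_URL_STDERR -> "stderr"), substitute
      // every path variable plus {{FileName}} into the user-supplied pattern.
      def replaceLogUrls(
          urlPattern: String,
          pathVariables: Map[String, String],
          envNameToFileName: Map[String, String]): Map[String, String] = {
        envNameToFileName.map { case (envName, fileName) =>
          val allVars = pathVariables + ("{{FileName}}" -> fileName)
          envName -> allVars.foldLeft(urlPattern) {
            case (url, (variable, value)) => url.replace(variable, value)
          }
        }
      }
    }

Under these assumptions, a pattern such as {{HttpScheme}}{{NodeHttpAddress}}/node/containerlogs/{{ContainerId}}/{{User}}/{{FileName}}?start=-4096 would reproduce the default URLs, while a cluster that serves logs from an external service could point the pattern elsewhere.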
 
 Review comment:
  Strictly speaking, the guideline itself does not explicitly mandate two lines - it says it's okay as long as the declaration fits within 2 lines (see https://github.com/databricks/scala-style-guide/pull/64#issuecomment-344609090). I intentionally avoided this style because some code in some components does not comply with it.
   
  However, strictly speaking we had better stick to the two-line indentation whenever possible, per https://github.com/databricks/scala-style-guide#indent:
   
   > Use 2-space indentation in general.
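
[Editorial note] For reference, the declaration style under discussion (the Databricks guide's 4-space continuation indent for function parameters) looks like the following. The Map[String, String] return type and the Map.empty body are illustrative, since the diff is truncated before them:

    // Fits within two lines: continuation parameters indented 4 spaces.
    def buildPathVariables(httpScheme: String, nodeHttpAddress: String,
        clusterId: String, containerId: String, user: String): Map[String, String] = {
      Map.empty  // body elided for illustration
    }

    // When the declaration does not fit within two lines: one parameter per
    // line, also indented 4 spaces.
    def buildPathVariables(
        httpScheme: String,
        nodeHttpAddress: String,
        clusterId: String,
        containerId: String,
        user: String): Map[String, String] = {
      Map.empty  // body elided for illustration
    }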

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org