You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by db...@apache.org on 2019/09/13 21:28:53 UTC
[spark] branch master updated: [SPARK-29064][CORE] Add
PrometheusResource to export Executor metrics
This is an automated email from the ASF dual-hosted git repository.
dbtsai pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new bbfaadb [SPARK-29064][CORE] Add PrometheusResource to export Executor metrics
bbfaadb is described below
commit bbfaadb280a80b511a98d18881641c6d9851dd51
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Fri Sep 13 21:28:27 2019 +0000
[SPARK-29064][CORE] Add PrometheusResource to export Executor metrics
### What changes were proposed in this pull request?
At Apache Spark 3.0.0, [SPARK-23429](https://github.com/apache/spark/pull/21221) added the ability to collect executor metrics via heartbeats and to expose it as a REST API. This PR aims to extend it to support `Prometheus` format additionally.
### Why are the changes needed?
Prometheus.io is a CNCF project used widely with K8s.
- https://github.com/prometheus/prometheus
### Does this PR introduce any user-facing change?
Yes. New web interfaces are added along with the existing JSON API.
| | JSON End Point | Prometheus End Point |
| ------- | ------------------------------------ | --------------------------------- |
| Driver | /api/v1/applications/{id}/executors | /metrics/executors/prometheus |
### How was this patch tested?
Manually connect to the new endpoints with `curl` and compare with JSON.
**SETUP**
```
$ sbin/start-master.sh
$ sbin/start-slave.sh spark://`hostname`:7077
$ bin/spark-shell --master spark://`hostname`:7077 --conf spark.ui.prometheus.enabled=true
```
**JSON (existing after SPARK-23429)**
```
$ curl -s http://localhost:4040/api/v1/applications/app-20190911204823-0000/executors
[ {
"id" : "driver",
"hostPort" : "localhost:52615",
"isActive" : true,
"rddBlocks" : 0,
"memoryUsed" : 0,
"diskUsed" : 0,
"totalCores" : 0,
"maxTasks" : 0,
"activeTasks" : 0,
"failedTasks" : 0,
"completedTasks" : 0,
"totalTasks" : 0,
"totalDuration" : 0,
"totalGCTime" : 0,
"totalInputBytes" : 0,
"totalShuffleRead" : 0,
"totalShuffleWrite" : 0,
"isBlacklisted" : false,
"maxMemory" : 384093388,
"addTime" : "2019-09-12T03:48:23.875GMT",
"executorLogs" : { },
"memoryMetrics" : {
"usedOnHeapStorageMemory" : 0,
"usedOffHeapStorageMemory" : 0,
"totalOnHeapStorageMemory" : 384093388,
"totalOffHeapStorageMemory" : 0
},
"blacklistedInStages" : [ ],
"peakMemoryMetrics" : {
"JVMHeapMemory" : 229995952,
"JVMOffHeapMemory" : 145872280,
"OnHeapExecutionMemory" : 0,
"OffHeapExecutionMemory" : 0,
"OnHeapStorageMemory" : 0,
"OffHeapStorageMemory" : 0,
"OnHeapUnifiedMemory" : 0,
"OffHeapUnifiedMemory" : 0,
"DirectPoolMemory" : 75891,
"MappedPoolMemory" : 0,
"ProcessTreeJVMVMemory" : 0,
"ProcessTreeJVMRSSMemory" : 0,
"ProcessTreePythonVMemory" : 0,
"ProcessTreePythonRSSMemory" : 0,
"ProcessTreeOtherVMemory" : 0,
"ProcessTreeOtherRSSMemory" : 0,
"MinorGCCount" : 8,
"MinorGCTime" : 82,
"MajorGCCount" : 3,
"MajorGCTime" : 128
},
"attributes" : { },
"resources" : { }
}, {
"id" : "0",
"hostPort" : "127.0.0.1:52619",
"isActive" : true,
"rddBlocks" : 0,
"memoryUsed" : 0,
"diskUsed" : 0,
"totalCores" : 16,
"maxTasks" : 16,
"activeTasks" : 0,
"failedTasks" : 0,
"completedTasks" : 0,
"totalTasks" : 0,
"totalDuration" : 0,
"totalGCTime" : 0,
"totalInputBytes" : 0,
"totalShuffleRead" : 0,
"totalShuffleWrite" : 0,
"isBlacklisted" : false,
"maxMemory" : 384093388,
"addTime" : "2019-09-12T03:48:25.907GMT",
"executorLogs" : {
"stdout" : "http://127.0.0.1:8081/logPage/?appId=app-20190911204823-0000&executorId=0&logType=stdout",
"stderr" : "http://127.0.0.1:8081/logPage/?appId=app-20190911204823-0000&executorId=0&logType=stderr"
},
"memoryMetrics" : {
"usedOnHeapStorageMemory" : 0,
"usedOffHeapStorageMemory" : 0,
"totalOnHeapStorageMemory" : 384093388,
"totalOffHeapStorageMemory" : 0
},
"blacklistedInStages" : [ ],
"attributes" : { },
"resources" : { }
} ]
```
**Prometheus**
```
$ curl -s http://localhost:4040/metrics/executors/prometheus
metrics_app_20190911204823_0000_driver_executor_rddBlocks_Count 0
metrics_app_20190911204823_0000_driver_executor_memoryUsed_Count 0
metrics_app_20190911204823_0000_driver_executor_diskUsed_Count 0
metrics_app_20190911204823_0000_driver_executor_totalCores_Count 0
metrics_app_20190911204823_0000_driver_executor_maxTasks_Count 0
metrics_app_20190911204823_0000_driver_executor_activeTasks_Count 0
metrics_app_20190911204823_0000_driver_executor_failedTasks_Count 0
metrics_app_20190911204823_0000_driver_executor_completedTasks_Count 0
metrics_app_20190911204823_0000_driver_executor_totalTasks_Count 0
metrics_app_20190911204823_0000_driver_executor_totalDuration_Value 0
metrics_app_20190911204823_0000_driver_executor_totalGCTime_Value 0
metrics_app_20190911204823_0000_driver_executor_totalInputBytes_Count 0
metrics_app_20190911204823_0000_driver_executor_totalShuffleRead_Count 0
metrics_app_20190911204823_0000_driver_executor_totalShuffleWrite_Count 0
metrics_app_20190911204823_0000_driver_executor_maxMemory_Count 384093388
metrics_app_20190911204823_0000_driver_executor_usedOnHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_usedOffHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_totalOnHeapStorageMemory_Count 384093388
metrics_app_20190911204823_0000_driver_executor_totalOffHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_JVMHeapMemory_Count 230406336
metrics_app_20190911204823_0000_driver_executor_JVMOffHeapMemory_Count 146132592
metrics_app_20190911204823_0000_driver_executor_OnHeapExecutionMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_OffHeapExecutionMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_OnHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_OffHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_OnHeapUnifiedMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_OffHeapUnifiedMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_DirectPoolMemory_Count 97049
metrics_app_20190911204823_0000_driver_executor_MappedPoolMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreeJVMVMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreeJVMRSSMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreePythonVMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreePythonRSSMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreeOtherVMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_ProcessTreeOtherRSSMemory_Count 0
metrics_app_20190911204823_0000_driver_executor_MinorGCCount_Count 8
metrics_app_20190911204823_0000_driver_executor_MinorGCTime_Count 82
metrics_app_20190911204823_0000_driver_executor_MajorGCCount_Count 3
metrics_app_20190911204823_0000_driver_executor_MajorGCTime_Count 128
metrics_app_20190911204823_0000_0_executor_rddBlocks_Count 0
metrics_app_20190911204823_0000_0_executor_memoryUsed_Count 0
metrics_app_20190911204823_0000_0_executor_diskUsed_Count 0
metrics_app_20190911204823_0000_0_executor_totalCores_Count 16
metrics_app_20190911204823_0000_0_executor_maxTasks_Count 16
metrics_app_20190911204823_0000_0_executor_activeTasks_Count 0
metrics_app_20190911204823_0000_0_executor_failedTasks_Count 0
metrics_app_20190911204823_0000_0_executor_completedTasks_Count 0
metrics_app_20190911204823_0000_0_executor_totalTasks_Count 0
metrics_app_20190911204823_0000_0_executor_totalDuration_Value 0
metrics_app_20190911204823_0000_0_executor_totalGCTime_Value 0
metrics_app_20190911204823_0000_0_executor_totalInputBytes_Count 0
metrics_app_20190911204823_0000_0_executor_totalShuffleRead_Count 0
metrics_app_20190911204823_0000_0_executor_totalShuffleWrite_Count 0
metrics_app_20190911204823_0000_0_executor_maxMemory_Count 384093388
metrics_app_20190911204823_0000_0_executor_usedOnHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_0_executor_usedOffHeapStorageMemory_Count 0
metrics_app_20190911204823_0000_0_executor_totalOnHeapStorageMemory_Count 384093388
metrics_app_20190911204823_0000_0_executor_totalOffHeapStorageMemory_Count 0
```
Closes #25770 from dongjoon-hyun/SPARK-29064.
Authored-by: Dongjoon Hyun <dh...@apple.com>
Signed-off-by: DB Tsai <d_...@apple.com>
---
.../org/apache/spark/internal/config/UI.scala | 7 ++
.../spark/status/api/v1/PrometheusResource.scala | 110 +++++++++++++++++++++
.../main/scala/org/apache/spark/ui/SparkUI.scala | 3 +
3 files changed, 120 insertions(+)
diff --git a/core/src/main/scala/org/apache/spark/internal/config/UI.scala b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
index a11970e..1a82681 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/UI.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
@@ -81,6 +81,13 @@ private[spark] object UI {
.booleanConf
.createWithDefault(true)
+ val UI_PROMETHEUS_ENABLED = ConfigBuilder("spark.ui.prometheus.enabled")
+ .internal()
+ .doc("Expose executor metrics at /metrics/executors/prometheus. " +
+ "For master/worker/driver metrics, you need to configure `conf/metrics.properties`.")
+ .booleanConf
+ .createWithDefault(false)
+
val UI_X_XSS_PROTECTION = ConfigBuilder("spark.ui.xXssProtection")
.doc("Value for HTTP X-XSS-Protection response header")
.stringConf
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/PrometheusResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/PrometheusResource.scala
new file mode 100644
index 0000000..6e52e21
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/PrometheusResource.scala
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.status.api.v1
+
+import javax.ws.rs._
+import javax.ws.rs.core.MediaType
+
+import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
+import org.glassfish.jersey.server.ServerProperties
+import org.glassfish.jersey.servlet.ServletContainer
+
+import org.apache.spark.ui.SparkUI
+
+/**
+ * Exposes per-executor metrics in the Prometheus text exposition format, mirroring the
+ * executor REST API documented at
+ *
+ * https://spark.apache.org/docs/3.0.0/monitoring.html#executor-metrics
+ *
+ * Note that this is based on ExecutorSummary which is different from ExecutorSource.
+ */
+@Path("/executors")
+private[v1] class PrometheusResource extends ApiRequestContext {
+
+  // Peak-memory metric names, in emission order. Loop-invariant, so built once per
+  // resource instance instead of once per executor per scrape.
+  private val peakMemoryMetricNames = Array(
+    "JVMHeapMemory",
+    "JVMOffHeapMemory",
+    "OnHeapExecutionMemory",
+    "OffHeapExecutionMemory",
+    "OnHeapStorageMemory",
+    "OffHeapStorageMemory",
+    "OnHeapUnifiedMemory",
+    "OffHeapUnifiedMemory",
+    "DirectPoolMemory",
+    "MappedPoolMemory",
+    "ProcessTreeJVMVMemory",
+    "ProcessTreeJVMRSSMemory",
+    "ProcessTreePythonVMemory",
+    "ProcessTreePythonRSSMemory",
+    "ProcessTreeOtherVMemory",
+    "ProcessTreeOtherRSSMemory",
+    "MinorGCCount",
+    "MinorGCTime",
+    "MajorGCCount",
+    "MajorGCTime"
+  )
+
+  /**
+   * Renders one `name value` line per metric for every active executor (driver included).
+   *
+   * @return Prometheus text-format body; one line per metric, newline-terminated.
+   */
+  @GET
+  @Path("prometheus")
+  @Produces(Array(MediaType.TEXT_PLAIN))
+  def executors(): String = {
+    val sb = new StringBuilder
+    val store = uiRoot.asInstanceOf[SparkUI].store
+    // Prometheus metric names may only contain [a-zA-Z0-9_:], so sanitize the app id.
+    val appId = store.applicationInfo.id.replaceAll("[^a-zA-Z0-9]", "_")
+    store.executorList(true).foreach { executor =>
+      val prefix = s"metrics_${appId}_${executor.id}_executor_"
+      sb.append(s"${prefix}rddBlocks_Count ${executor.rddBlocks}\n")
+      sb.append(s"${prefix}memoryUsed_Count ${executor.memoryUsed}\n")
+      sb.append(s"${prefix}diskUsed_Count ${executor.diskUsed}\n")
+      sb.append(s"${prefix}totalCores_Count ${executor.totalCores}\n")
+      sb.append(s"${prefix}maxTasks_Count ${executor.maxTasks}\n")
+      sb.append(s"${prefix}activeTasks_Count ${executor.activeTasks}\n")
+      sb.append(s"${prefix}failedTasks_Count ${executor.failedTasks}\n")
+      sb.append(s"${prefix}completedTasks_Count ${executor.completedTasks}\n")
+      sb.append(s"${prefix}totalTasks_Count ${executor.totalTasks}\n")
+      sb.append(s"${prefix}totalDuration_Value ${executor.totalDuration}\n")
+      sb.append(s"${prefix}totalGCTime_Value ${executor.totalGCTime}\n")
+      sb.append(s"${prefix}totalInputBytes_Count ${executor.totalInputBytes}\n")
+      sb.append(s"${prefix}totalShuffleRead_Count ${executor.totalShuffleRead}\n")
+      sb.append(s"${prefix}totalShuffleWrite_Count ${executor.totalShuffleWrite}\n")
+      sb.append(s"${prefix}maxMemory_Count ${executor.maxMemory}\n")
+      executor.memoryMetrics.foreach { m =>
+        sb.append(s"${prefix}usedOnHeapStorageMemory_Count ${m.usedOnHeapStorageMemory}\n")
+        sb.append(s"${prefix}usedOffHeapStorageMemory_Count ${m.usedOffHeapStorageMemory}\n")
+        sb.append(s"${prefix}totalOnHeapStorageMemory_Count ${m.totalOnHeapStorageMemory}\n")
+        sb.append(s"${prefix}totalOffHeapStorageMemory_Count ${m.totalOffHeapStorageMemory}\n")
+      }
+      executor.peakMemoryMetrics.foreach { m =>
+        peakMemoryMetricNames.foreach { name =>
+          sb.append(s"$prefix${name}_Count ${m.getMetricValue(name)}\n")
+        }
+      }
+    }
+    sb.toString
+  }
+}
+
+private[spark] object PrometheusResource {
+
+  /**
+   * Builds the Jetty handler that serves this resource under the `/metrics` context.
+   *
+   * @param uiRoot the UI root used to look up the app status store per request.
+   * @return a session-less handler routing every `/metrics/*` request through Jersey.
+   */
+  def getServletHandler(uiRoot: UIRoot): ServletContextHandler = {
+    // Jersey discovers the annotated resource classes in this package.
+    val servletHolder = new ServletHolder(classOf[ServletContainer])
+    servletHolder.setInitParameter(ServerProperties.PROVIDER_PACKAGES, "org.apache.spark.status.api.v1")
+    val handler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS)
+    handler.setContextPath("/metrics")
+    UIRootFromServletContext.setUiRoot(handler, uiRoot)
+    handler.addServlet(servletHolder, "/*")
+    handler
+  }
+}
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 6fb8e45..05e9c7f 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -66,6 +66,9 @@ private[spark] class SparkUI private (
addStaticHandler(SparkUI.STATIC_RESOURCE_DIR)
attachHandler(createRedirectHandler("/", "/jobs/", basePath = basePath))
attachHandler(ApiRootResource.getServletHandler(this))
+ if (sc.map(_.conf.get(UI_PROMETHEUS_ENABLED)).getOrElse(false)) {
+ attachHandler(PrometheusResource.getServletHandler(this))
+ }
// These should be POST only, but, the YARN AM proxy won't proxy POSTs
attachHandler(createRedirectHandler(
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org