You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by cloud-fan <gi...@git.apache.org> on 2017/12/14 16:11:13 UTC
[GitHub] spark pull request #19681: [SPARK-20652][sql] Store SQL UI data in the new a...
Github user cloud-fan commented on a diff in the pull request:
https://github.com/apache/spark/pull/19681#discussion_r156987695
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusStore.scala ---
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.ui
+
+import java.lang.{Long => JLong}
+import java.util.Date
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize
+
+import org.apache.spark.{JobExecutionStatus, SparkConf}
+import org.apache.spark.scheduler.SparkListener
+import org.apache.spark.status.AppStatusPlugin
+import org.apache.spark.status.KVUtils.KVIndexParam
+import org.apache.spark.ui.SparkUI
+import org.apache.spark.util.Utils
+import org.apache.spark.util.kvstore.KVStore
+
+/**
+ * Provides a view of a KVStore with methods that make it easy to query SQL-specific state. There's
+ * no state kept in this class, so it's ok to have multiple instances of it in an application.
+ */
+private[sql] class SQLAppStatusStore(
+ store: KVStore,
+ listener: Option[SQLAppStatusListener] = None) {
+
+ def executionsList(): Seq[SQLExecutionUIData] = {
+ store.view(classOf[SQLExecutionUIData]).asScala.toSeq
+ }
+
+ def execution(executionId: Long): Option[SQLExecutionUIData] = {
+ try {
+ Some(store.read(classOf[SQLExecutionUIData], executionId))
+ } catch {
+ case _: NoSuchElementException => None
+ }
+ }
+
+ def executionsCount(): Long = {
+ store.count(classOf[SQLExecutionUIData])
+ }
+
+ def executionMetrics(executionId: Long): Map[Long, String] = {
+ def metricsFromStore(): Option[Map[Long, String]] = {
+ val exec = store.read(classOf[SQLExecutionUIData], executionId)
+ Option(exec.metricValues)
+ }
+
+ metricsFromStore()
+ .orElse(listener.flatMap(_.liveExecutionMetrics(executionId)))
+ // Try a second time in case the execution finished while this method is trying to
+ // get the metrics.
+ .orElse(metricsFromStore())
+ .getOrElse(Map())
+ }
+
+ def planGraph(executionId: Long): SparkPlanGraph = {
+ store.read(classOf[SparkPlanGraphWrapper], executionId).toSparkPlanGraph()
+ }
+
+}
+
+/**
+ * An AppStatusPlugin for handling the SQL UI and listeners.
+ */
+private[sql] class SQLAppStatusPlugin extends AppStatusPlugin {
+
+ override def setupListeners(
+ conf: SparkConf,
+ store: KVStore,
+ addListenerFn: SparkListener => Unit,
+ live: Boolean): Unit = {
+ // For live applications, the listener is installed in [[setupUI]]. This also avoids adding
+ // the listener when the UI is disabled. Force installation during testing, though.
+ if (!live || Utils.isTesting) {
+ val listener = new SQLAppStatusListener(conf, store, live, None)
+ addListenerFn(listener)
+ }
+ }
+
+ override def setupUI(ui: SparkUI): Unit = {
--- End diff --
Do we have a clear rule about when `setupListeners` is called and when `setupUI` is called?
Here we register `SQLAppStatusListener` in both `setupListeners` and `setupUI`; will we register it twice?
---
---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org