You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2021/10/21 01:02:04 UTC

[spark] branch branch-3.2 updated: [SPARK-37061][SQL] Fix CustomMetrics when using Inner Classes

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new b776709  [SPARK-37061][SQL] Fix CustomMetrics when using Inner Classes
b776709 is described below

commit b776709496d3289090235d5026f639dee45a2eb9
Author: Russell Spitzer <ru...@gmail.com>
AuthorDate: Wed Oct 20 17:59:10 2021 -0700

    [SPARK-37061][SQL] Fix CustomMetrics when using Inner Classes
    
    ### What changes were proposed in this pull request?
    Previously CustomMetrics used Class.getCanonicalName when attempting to get the
    class name of CustomMetric implementations. These names replace special characters
    for marking inner classes like ($) with ".". While those names are appropriate for
    referring to classes within source files, they will not work during reflection where
    the Class.getName output should be used.
    
    ### Why are the changes needed?
    Inner classes could never be found when they are used as Custom Metrics
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Tests modified so they access both an independent metric class as well as an inner class.
    
    Closes #34345 from RussellSpitzer/SPARK-37061.
    
    Authored-by: Russell Spitzer <ru...@gmail.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
    (cherry picked from commit 2ce551e3e14cbba09ab67bb54e8d79f5062312be)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 .../spark/sql/execution/metric/CustomMetrics.scala |  2 +-
 .../execution/ui/SQLAppStatusListenerSuite.scala   | 33 ++++++++++++++++++----
 2 files changed, 29 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/CustomMetrics.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/CustomMetrics.scala
index 222a705..e0138b9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/CustomMetrics.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/CustomMetrics.scala
@@ -29,7 +29,7 @@ object CustomMetrics {
    * `CustomMetric`.
    */
   def buildV2CustomMetricTypeName(customMetric: CustomMetric): String = {
-    s"${V2_CUSTOM}_${customMetric.getClass.getCanonicalName}"
+    s"${V2_CUSTOM}_${customMetric.getClass.getName}"
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala
index bcb5892..e776a4a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala
@@ -854,9 +854,12 @@ class SQLAppStatusListenerSuite extends SharedSparkSession with JsonTestUtils
     val metrics = statusStore.executionMetrics(execId)
     val expectedMetric = physicalPlan.metrics("custom_metric")
     val expectedValue = "custom_metric: 12345, 12345"
-
+    val innerMetric = physicalPlan.metrics("inner_metric")
+    val expectedInnerValue = "inner_metric: 54321, 54321"
     assert(metrics.contains(expectedMetric.id))
     assert(metrics(expectedMetric.id) === expectedValue)
+    assert(metrics.contains(innerMetric.id))
+    assert(metrics(innerMetric.id) === expectedInnerValue)
   }
 
   test("SPARK-36030: Report metrics from Datasource v2 write") {
@@ -882,7 +885,9 @@ class SQLAppStatusListenerSuite extends SharedSparkSession with JsonTestUtils
       val execId = statusStore.executionsList().last.executionId
       val metrics = statusStore.executionMetrics(execId)
       val customMetric = metrics.find(_._2 == "custom_metric: 12345, 12345")
+      val innerMetric = metrics.find(_._2 == "inner_metric: 54321, 54321")
       assert(customMetric.isDefined)
+      assert(innerMetric.isDefined)
     }
   }
 }
@@ -960,6 +965,16 @@ class SQLAppStatusListenerMemoryLeakSuite extends SparkFunSuite {
   }
 }
 
+object Outer {
+  class InnerCustomMetric extends CustomMetric {
+    override def name(): String = "inner_metric"
+    override def description(): String = "a simple custom metric in an inner class"
+    override def aggregateTaskMetrics(taskMetrics: Array[Long]): String = {
+      s"inner_metric: ${taskMetrics.mkString(", ")}"
+    }
+  }
+}
+
 class SimpleCustomMetric extends CustomMetric {
   override def name(): String = "custom_metric"
   override def description(): String = "a simple custom metric"
@@ -989,7 +1004,11 @@ object CustomMetricReaderFactory extends PartitionReaderFactory {
           override def name(): String = "custom_metric"
           override def value(): Long = 12345
         }
-        Array(metric)
+        val innerMetric = new CustomTaskMetric {
+          override def name(): String = "inner_metric"
+          override def value(): Long = 54321;
+        }
+        Array(metric, innerMetric)
       }
     }
   }
@@ -1001,7 +1020,7 @@ class CustomMetricScanBuilder extends SimpleScanBuilder {
   }
 
   override def supportedCustomMetrics(): Array[CustomMetric] = {
-    Array(new SimpleCustomMetric)
+    Array(new SimpleCustomMetric, new Outer.InnerCustomMetric)
   }
 
   override def createReaderFactory(): PartitionReaderFactory = CustomMetricReaderFactory
@@ -1013,7 +1032,11 @@ class CustomMetricsCSVDataWriter(fs: FileSystem, file: Path) extends CSVDataWrit
       override def name(): String = "custom_metric"
       override def value(): Long = 12345
     }
-    Array(metric)
+    val innerMetric = new CustomTaskMetric {
+      override def name(): String = "inner_metric"
+      override def value(): Long = 54321;
+    }
+    Array(metric, innerMetric)
   }
 }
 
@@ -1055,7 +1078,7 @@ class CustomMetricsDataSource extends SimpleWritableDataSource {
         }
 
         override def supportedCustomMetrics(): Array[CustomMetric] = {
-          Array(new SimpleCustomMetric)
+          Array(new SimpleCustomMetric, new Outer.InnerCustomMetric)
         }
       }
     }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org