You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2023/03/27 16:27:24 UTC

[spark] branch branch-3.4 updated: [SPARK-42930][CORE][SQL] Change the access scope of `ProtobufSerDe` related implementations to `private[protobuf]`

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new c7018597fd6 [SPARK-42930][CORE][SQL] Change the access scope of `ProtobufSerDe` related implementations to `private[protobuf]`
c7018597fd6 is described below

commit c7018597fd66153d912b6ca7d0c5c1750338b704
Author: yangjie01 <ya...@baidu.com>
AuthorDate: Mon Mar 27 09:27:01 2023 -0700

    [SPARK-42930][CORE][SQL] Change the access scope of `ProtobufSerDe` related implementations to `private[protobuf]`
    
    ### What changes were proposed in this pull request?
    After [SPARK-41053](https://issues.apache.org/jira/browse/SPARK-41053), Spark supports serializing/deserializing Live UI data to RocksDB using protobuf, but these are internal implementation details, so this PR changes the access scope of `ProtobufSerDe` related implementations to `private[protobuf]`.
    
    ### Why are the changes needed?
    Weaken the access scope of Spark's internal implementation details.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    Closes #40560 from LuciferYang/SPARK-42930.
    
    Authored-by: yangjie01 <ya...@baidu.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
    (cherry picked from commit b8f16bc6c3400dce13795c6dfa176dd793341df0)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 .../scala/org/apache/spark/status/protobuf/AppSummarySerializer.scala  | 2 +-
 .../status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala  | 2 +-
 .../spark/status/protobuf/ApplicationInfoWrapperSerializer.scala       | 3 ++-
 .../org/apache/spark/status/protobuf/CachedQuantileSerializer.scala    | 2 +-
 .../org/apache/spark/status/protobuf/ExecutorMetricsSerializer.scala   | 2 +-
 .../spark/status/protobuf/ExecutorStageSummaryWrapperSerializer.scala  | 2 +-
 .../spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala       | 3 ++-
 .../org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala    | 2 +-
 .../scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala    | 2 +-
 .../apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala | 3 ++-
 .../spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala     | 3 ++-
 .../apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala | 3 ++-
 .../spark/status/protobuf/ResourceProfileWrapperSerializer.scala       | 3 ++-
 .../status/protobuf/SpeculationStageSummaryWrapperSerializer.scala     | 2 +-
 .../org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala  | 2 +-
 .../org/apache/spark/status/protobuf/StreamBlockDataSerializer.scala   | 2 +-
 .../org/apache/spark/status/protobuf/TaskDataWrapperSerializer.scala   | 2 +-
 core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala       | 2 +-
 .../spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala       | 2 +-
 .../org/apache/spark/status/protobuf/sql/SQLPlanMetricSerializer.scala | 2 +-
 .../spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala    | 3 ++-
 .../spark/status/protobuf/sql/StateOperatorProgressSerializer.scala    | 2 +-
 .../spark/status/protobuf/sql/StreamingQueryDataSerializer.scala       | 2 +-
 .../status/protobuf/sql/StreamingQueryProgressWrapperSerializer.scala  | 3 ++-
 24 files changed, 32 insertions(+), 24 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/AppSummarySerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/AppSummarySerializer.scala
index ec17ef1dc3b..5fe424b09dd 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/AppSummarySerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/AppSummarySerializer.scala
@@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf
 
 import org.apache.spark.status.AppSummary
 
-class AppSummarySerializer extends ProtobufSerDe[AppSummary] {
+private[protobuf] class AppSummarySerializer extends ProtobufSerDe[AppSummary] {
 
   override def serialize(input: AppSummary): Array[Byte] = {
     val builder = StoreTypes.AppSummary.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala
index 63c8387a8db..c91bc83ad91 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala
@@ -24,7 +24,7 @@ import org.apache.spark.status.ApplicationEnvironmentInfoWrapper
 import org.apache.spark.status.api.v1.{ApplicationEnvironmentInfo, ResourceProfileInfo, RuntimeInfo}
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 
-class ApplicationEnvironmentInfoWrapperSerializer
+private[protobuf] class ApplicationEnvironmentInfoWrapperSerializer
   extends ProtobufSerDe[ApplicationEnvironmentInfoWrapper] {
 
   override def serialize(input: ApplicationEnvironmentInfoWrapper): Array[Byte] = {
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala
index 4b2bcfa1d1f..f8c473f1a38 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala
@@ -26,7 +26,8 @@ import org.apache.spark.status.api.v1.{ApplicationAttemptInfo, ApplicationInfo}
 import org.apache.spark.status.protobuf.Utils._
 
 
-class ApplicationInfoWrapperSerializer extends ProtobufSerDe[ApplicationInfoWrapper] {
+private[protobuf] class ApplicationInfoWrapperSerializer
+  extends ProtobufSerDe[ApplicationInfoWrapper] {
 
   override def serialize(j: ApplicationInfoWrapper): Array[Byte] = {
     val jobData = serializeApplicationInfo(j.info)
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/CachedQuantileSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/CachedQuantileSerializer.scala
index 3bbe0d1ddc8..89acf8e2425 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/CachedQuantileSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/CachedQuantileSerializer.scala
@@ -20,7 +20,7 @@ package org.apache.spark.status.protobuf
 import org.apache.spark.status.CachedQuantile
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 
-class CachedQuantileSerializer extends ProtobufSerDe[CachedQuantile] {
+private[protobuf] class CachedQuantileSerializer extends ProtobufSerDe[CachedQuantile] {
 
   override def serialize(data: CachedQuantile): Array[Byte] = {
     val builder = StoreTypes.CachedQuantile.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorMetricsSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorMetricsSerializer.scala
index be3617e786a..8818d73f158 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorMetricsSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorMetricsSerializer.scala
@@ -20,7 +20,7 @@ package org.apache.spark.status.protobuf
 import org.apache.spark.executor.ExecutorMetrics
 import org.apache.spark.metrics.ExecutorMetricType
 
-object ExecutorMetricsSerializer {
+private[protobuf] object ExecutorMetricsSerializer {
   def serialize(e: ExecutorMetrics): StoreTypes.ExecutorMetrics = {
     val builder = StoreTypes.ExecutorMetrics.newBuilder()
     ExecutorMetricType.metricToOffset.foreach { case (metric, _) =>
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorStageSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorStageSummaryWrapperSerializer.scala
index 8e41a857057..c37013595a4 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorStageSummaryWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorStageSummaryWrapperSerializer.scala
@@ -21,7 +21,7 @@ import org.apache.spark.status.ExecutorStageSummaryWrapper
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class ExecutorStageSummaryWrapperSerializer
+private[protobuf] class ExecutorStageSummaryWrapperSerializer
   extends ProtobufSerDe[ExecutorStageSummaryWrapper] {
 
   override def serialize(input: ExecutorStageSummaryWrapper): Array[Byte] = {
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala
index e3585feeb44..381210e18d9 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala
@@ -27,7 +27,8 @@ import org.apache.spark.status.api.v1.{ExecutorSummary, MemoryMetrics}
 import org.apache.spark.status.protobuf.Utils.{getOptional, getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class ExecutorSummaryWrapperSerializer extends ProtobufSerDe[ExecutorSummaryWrapper] {
+private[protobuf] class ExecutorSummaryWrapperSerializer
+  extends ProtobufSerDe[ExecutorSummaryWrapper] {
 
   override def serialize(input: ExecutorSummaryWrapper): Array[Byte] = {
     val info = serializeExecutorSummary(input.info)
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala
index 55bb4e2549e..d7aad2845d6 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala
@@ -25,7 +25,7 @@ import org.apache.spark.status.JobDataWrapper
 import org.apache.spark.status.api.v1.JobData
 import org.apache.spark.status.protobuf.Utils.{getOptional, getStringField, setStringField}
 
-class JobDataWrapperSerializer extends ProtobufSerDe[JobDataWrapper] {
+private[protobuf] class JobDataWrapperSerializer extends ProtobufSerDe[JobDataWrapper] {
 
   override def serialize(j: JobDataWrapper): Array[Byte] = {
     val jobData = serializeJobData(j.info)
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala
index b1600d97a85..a47308fc74a 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala
@@ -22,7 +22,7 @@ import scala.collection.JavaConverters._
 import org.apache.spark.status.PoolData
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 
-class PoolDataSerializer extends ProtobufSerDe[PoolData] {
+private[protobuf] class PoolDataSerializer extends ProtobufSerDe[PoolData] {
 
   override def serialize(input: PoolData): Array[Byte] = {
     val builder = StoreTypes.PoolData.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala
index 3a5d224f41b..b21d6540738 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala
@@ -25,7 +25,8 @@ import org.apache.spark.status.ProcessSummaryWrapper
 import org.apache.spark.status.api.v1.ProcessSummary
 import org.apache.spark.status.protobuf.Utils.{getOptional, getStringField, setStringField}
 
-class ProcessSummaryWrapperSerializer extends ProtobufSerDe[ProcessSummaryWrapper] {
+private[protobuf] class ProcessSummaryWrapperSerializer
+  extends ProtobufSerDe[ProcessSummaryWrapper] {
 
   override def serialize(input: ProcessSummaryWrapper): Array[Byte] = {
     val builder = StoreTypes.ProcessSummaryWrapper.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala
index 3187b255d4c..6d24d64c43b 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala
@@ -25,7 +25,8 @@ import org.apache.spark.status.protobuf.StoreTypes.{DeterministicLevel => GDeter
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 import org.apache.spark.ui.scope.{RDDOperationEdge, RDDOperationNode}
 
-class RDDOperationGraphWrapperSerializer extends ProtobufSerDe[RDDOperationGraphWrapper] {
+private[protobuf] class RDDOperationGraphWrapperSerializer
+  extends ProtobufSerDe[RDDOperationGraphWrapper] {
 
   override def serialize(op: RDDOperationGraphWrapper): Array[Byte] = {
     val builder = StoreTypes.RDDOperationGraphWrapper.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala
index fef8c5d478e..f58ae0fb7f0 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala
@@ -24,7 +24,8 @@ import org.apache.spark.status.api.v1.{RDDDataDistribution, RDDPartitionInfo, RD
 import org.apache.spark.status.protobuf.Utils.{getOptional, getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class RDDStorageInfoWrapperSerializer extends ProtobufSerDe[RDDStorageInfoWrapper] {
+private[protobuf] class RDDStorageInfoWrapperSerializer
+  extends ProtobufSerDe[RDDStorageInfoWrapper] {
 
   override def serialize(input: RDDStorageInfoWrapper): Array[Byte] = {
     val builder = StoreTypes.RDDStorageInfoWrapper.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ResourceProfileWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ResourceProfileWrapperSerializer.scala
index d9d29cc8d88..3078055a7c3 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/ResourceProfileWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/ResourceProfileWrapperSerializer.scala
@@ -19,7 +19,8 @@ package org.apache.spark.status.protobuf
 
 import org.apache.spark.status.ResourceProfileWrapper
 
-class ResourceProfileWrapperSerializer extends ProtobufSerDe[ResourceProfileWrapper] {
+private[protobuf] class ResourceProfileWrapperSerializer
+  extends ProtobufSerDe[ResourceProfileWrapper] {
 
   private val appEnvSerializer = new ApplicationEnvironmentInfoWrapperSerializer
 
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/SpeculationStageSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/SpeculationStageSummaryWrapperSerializer.scala
index 1b9a1ecfce6..155ab52d11c 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/SpeculationStageSummaryWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/SpeculationStageSummaryWrapperSerializer.scala
@@ -20,7 +20,7 @@ package org.apache.spark.status.protobuf
 import org.apache.spark.status.SpeculationStageSummaryWrapper
 import org.apache.spark.status.api.v1.SpeculationStageSummary
 
-class SpeculationStageSummaryWrapperSerializer
+private[protobuf] class SpeculationStageSummaryWrapperSerializer
   extends ProtobufSerDe[SpeculationStageSummaryWrapper] {
 
   override def serialize(s: SpeculationStageSummaryWrapper): Array[Byte] = {
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala
index 25394c1a719..df0c81d6964 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala
@@ -27,7 +27,7 @@ import org.apache.spark.status.api.v1.{ExecutorMetricsDistributions, ExecutorPea
 import org.apache.spark.status.protobuf.Utils._
 import org.apache.spark.util.Utils.weakIntern
 
-class StageDataWrapperSerializer extends ProtobufSerDe[StageDataWrapper] {
+private[protobuf] class StageDataWrapperSerializer extends ProtobufSerDe[StageDataWrapper] {
 
   override def serialize(input: StageDataWrapper): Array[Byte] = {
     val builder = StoreTypes.StageDataWrapper.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/StreamBlockDataSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/StreamBlockDataSerializer.scala
index fff7cf8ffc4..264e433e32d 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/StreamBlockDataSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/StreamBlockDataSerializer.scala
@@ -21,7 +21,7 @@ import org.apache.spark.status.StreamBlockData
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class StreamBlockDataSerializer extends ProtobufSerDe[StreamBlockData] {
+private[protobuf] class StreamBlockDataSerializer extends ProtobufSerDe[StreamBlockData] {
 
   override def serialize(data: StreamBlockData): Array[Byte] = {
     val builder = StoreTypes.StreamBlockData.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/TaskDataWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/TaskDataWrapperSerializer.scala
index 298a1612212..8c729fa9478 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/TaskDataWrapperSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/TaskDataWrapperSerializer.scala
@@ -21,7 +21,7 @@ import org.apache.spark.status.TaskDataWrapper
 import org.apache.spark.status.protobuf.Utils.{getOptional, getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class TaskDataWrapperSerializer extends ProtobufSerDe[TaskDataWrapper] {
+private[protobuf] class TaskDataWrapperSerializer extends ProtobufSerDe[TaskDataWrapper] {
 
   override def serialize(input: TaskDataWrapper): Array[Byte] = {
     val builder = StoreTypes.TaskDataWrapper.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala b/core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala
index cef6df3f569..bfd6eb9f2e0 100644
--- a/core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/status/protobuf/Utils.scala
@@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf
 
 import java.util.{Map => JMap}
 
-object Utils {
+private[protobuf] object Utils {
   def getOptional[T](condition: Boolean, result: () => T): Option[T] = if (condition) {
     Some(result())
   } else {
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala
index f0cdca985b7..7131004801b 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.execution.ui.SQLExecutionUIData
 import org.apache.spark.status.protobuf.{JobExecutionStatusSerializer, ProtobufSerDe, StoreTypes}
 import org.apache.spark.status.protobuf.Utils._
 
-class SQLExecutionUIDataSerializer extends ProtobufSerDe[SQLExecutionUIData] {
+private[protobuf] class SQLExecutionUIDataSerializer extends ProtobufSerDe[SQLExecutionUIData] {
 
   override def serialize(ui: SQLExecutionUIData): Array[Byte] = {
     val builder = StoreTypes.SQLExecutionUIData.newBuilder()
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLPlanMetricSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLPlanMetricSerializer.scala
index 88ba51c52b4..a0c15c3c322 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLPlanMetricSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLPlanMetricSerializer.scala
@@ -22,7 +22,7 @@ import org.apache.spark.status.protobuf.StoreTypes
 import org.apache.spark.status.protobuf.Utils._
 import org.apache.spark.util.Utils.weakIntern
 
-object SQLPlanMetricSerializer {
+private[protobuf] object SQLPlanMetricSerializer {
 
   def serialize(metric: SQLPlanMetric): StoreTypes.SQLPlanMetric = {
     val builder = StoreTypes.SQLPlanMetric.newBuilder()
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala
index bff5c0d7619..89e33c243cb 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala
@@ -25,7 +25,8 @@ import org.apache.spark.status.protobuf.StoreTypes
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 import org.apache.spark.util.Utils.weakIntern
 
-class SparkPlanGraphWrapperSerializer extends ProtobufSerDe[SparkPlanGraphWrapper] {
+private[protobuf] class SparkPlanGraphWrapperSerializer
+  extends ProtobufSerDe[SparkPlanGraphWrapper] {
 
   override def serialize(plan: SparkPlanGraphWrapper): Array[Byte] = {
     val builder = StoreTypes.SparkPlanGraphWrapper.newBuilder()
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StateOperatorProgressSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StateOperatorProgressSerializer.scala
index 8b66e8e289b..951dac17462 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StateOperatorProgressSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StateOperatorProgressSerializer.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.streaming.StateOperatorProgress
 import org.apache.spark.status.protobuf.StoreTypes
 import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField}
 
-object StateOperatorProgressSerializer {
+private[protobuf] object StateOperatorProgressSerializer {
 
   def serialize(stateOperator: StateOperatorProgress): StoreTypes.StateOperatorProgress = {
     import org.apache.spark.status.protobuf.Utils.setJMapField
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryDataSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryDataSerializer.scala
index 65758594c40..3511f62f0b6 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryDataSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryDataSerializer.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.streaming.ui.StreamingQueryData
 import org.apache.spark.status.protobuf.{ProtobufSerDe, StoreTypes}
 import org.apache.spark.status.protobuf.Utils._
 
-class StreamingQueryDataSerializer extends ProtobufSerDe[StreamingQueryData] {
+private[protobuf] class StreamingQueryDataSerializer extends ProtobufSerDe[StreamingQueryData] {
 
   override def serialize(data: StreamingQueryData): Array[Byte] = {
     val builder = StoreTypes.StreamingQueryData.newBuilder()
diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryProgressWrapperSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryProgressWrapperSerializer.scala
index 21a0adc26da..f732ff8cb35 100644
--- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryProgressWrapperSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/StreamingQueryProgressWrapperSerializer.scala
@@ -20,7 +20,8 @@ package org.apache.spark.status.protobuf.sql
 import org.apache.spark.sql.streaming.ui.StreamingQueryProgressWrapper
 import org.apache.spark.status.protobuf.{ProtobufSerDe, StoreTypes}
 
-class StreamingQueryProgressWrapperSerializer extends ProtobufSerDe[StreamingQueryProgressWrapper] {
+private[protobuf] class StreamingQueryProgressWrapperSerializer
+  extends ProtobufSerDe[StreamingQueryProgressWrapper] {
 
   override def serialize(data: StreamingQueryProgressWrapper): Array[Byte] = {
     val builder = StoreTypes.StreamingQueryProgressWrapper.newBuilder()


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org