Posted to commits@spark.apache.org by we...@apache.org on 2022/11/09 06:42:47 UTC

[spark] branch master updated: [SPARK-41058][CONNECT] Remove unused import in commands.proto

This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6858ba95bf7 [SPARK-41058][CONNECT] Remove unused import in commands.proto
6858ba95bf7 is described below

commit 6858ba95bf7ecf37b2bee540cad3b9317f13781b
Author: dengziming <de...@bytedance.com>
AuthorDate: Wed Nov 9 14:42:28 2022 +0800

    [SPARK-41058][CONNECT] Remove unused import in commands.proto
    
    ### What changes were proposed in this pull request?
    expressions.proto is imported by commands.proto but never used. Remove that
    import, drop a few unused imports and an unused local variable in the Scala
    Spark Connect code, and regenerate the Python protobuf bindings.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    GitHub CI
    
    Closes #38491 from dengziming/minor-import.
    
    Authored-by: dengziming <de...@bytedance.com>
    Signed-off-by: Wenchen Fan <we...@databricks.com>
---
 .../src/main/protobuf/spark/connect/commands.proto |  1 -
 .../sql/connect/planner/SparkConnectPlanner.scala  |  7 -----
 .../service/SparkConnectStreamHandler.scala        |  3 ---
 python/pyspark/sql/connect/proto/commands_pb2.py   | 31 +++++++++++-----------
 4 files changed, 15 insertions(+), 27 deletions(-)

diff --git a/connector/connect/src/main/protobuf/spark/connect/commands.proto b/connector/connect/src/main/protobuf/spark/connect/commands.proto
index bc8bb478122..79c6cffdf60 100644
--- a/connector/connect/src/main/protobuf/spark/connect/commands.proto
+++ b/connector/connect/src/main/protobuf/spark/connect/commands.proto
@@ -17,7 +17,6 @@
 
 syntax = 'proto3';
 
-import "spark/connect/expressions.proto";
 import "spark/connect/relations.proto";
 import "spark/connect/types.proto";
 
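The import was dead because nothing in commands.proto references a type defined
in expressions.proto: the Command message nests only CreateScalarFunction and
WriteOperation, which draw on relations.proto and types.proto. A minimal sketch
of building a Command against the generated Java bindings (builder and field
names assumed from the java_package option visible in the descriptor below):

    import org.apache.spark.connect.proto

    // Neither oneof branch of Command carries an Expression, so the
    // expressions.proto dependency can be dropped without breaking codegen.
    val cmd: proto.Command = proto.Command
      .newBuilder()
      .setWriteOperation(proto.WriteOperation.newBuilder())
      .build()
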
diff --git a/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index 6a5808bc77f..3bbdbf80276 100644
--- a/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++ b/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.connect.planner
 
-import scala.annotation.elidable.byName
 import scala.collection.JavaConverters._
 
 import org.apache.spark.connect.proto
@@ -49,12 +48,6 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
 
   // The root of the query plan is a relation and we apply the transformations to it.
   private def transformRelation(rel: proto.Relation): LogicalPlan = {
-    val common = if (rel.hasCommon) {
-      Some(rel.getCommon)
-    } else {
-      None
-    }
-
     rel.getRelTypeCase match {
       case proto.Relation.RelTypeCase.READ => transformReadRel(rel.getRead)
       case proto.Relation.RelTypeCase.PROJECT => transformProject(rel.getProject)
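The block deleted above was a dead store: the Option was built but never read
before the oneof dispatch on getRelTypeCase. For reference, the removed logic
collapses to this one-liner (a sketch; the RelationCommon type name is assumed
from relations.proto):

    // Equivalent to the deleted block; nothing downstream consumed `common`.
    val common: Option[proto.RelationCommon] =
      if (rel.hasCommon) Some(rel.getCommon) else None
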
diff --git a/connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala b/connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala
index a429823c02f..58fc6237867 100644
--- a/connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala
+++ b/connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamHandler.scala
@@ -31,7 +31,6 @@ import org.apache.spark.sql.connect.command.SparkConnectCommandPlanner
 import org.apache.spark.sql.connect.planner.SparkConnectPlanner
 import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, AdaptiveSparkPlanHelper, QueryStageExec}
-import org.apache.spark.sql.internal.SQLConf
 
 class SparkConnectStreamHandler(responseObserver: StreamObserver[Response]) extends Logging {
 
@@ -58,8 +57,6 @@ class SparkConnectStreamHandler(responseObserver: StreamObserver[Response]) exte
   }
 
   def processRows(clientId: String, rows: DataFrame): Unit = {
-    val timeZoneId = SQLConf.get.sessionLocalTimeZone
-
     // Only process up to 10MB of data.
     val sb = new StringBuilder
     var rowCount = 0
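Same pattern here: timeZoneId was fetched from the session config but never
used in the row-processing loop that follows, so both the local and the SQLConf
import could go. In isolation, the removed code was simply:

    import org.apache.spark.sql.internal.SQLConf

    // Dead store: the session-local time zone was read and then ignored.
    val timeZoneId: String = SQLConf.get.sessionLocalTimeZone
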
diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py
index 905c621011f..fa05b6ff76c 100644
--- a/python/pyspark/sql/connect/proto/commands_pb2.py
+++ b/python/pyspark/sql/connect/proto/commands_pb2.py
@@ -28,13 +28,12 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
-from pyspark.sql.connect.proto import expressions_pb2 as spark_dot_connect_dot_expressions__pb2
 from pyspark.sql.connect.proto import relations_pb2 as spark_dot_connect_dot_relations__pb2
 from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__pb2
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\xb3\x01\n\x07\x43ommand\x12N\n\x0f\x63reate_function\x18\x01 \x01(\x0b\x32#.spark.connect.CreateScalarFunctionH\x00R\x0e\x63reateFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperationB\x0e\n\x0c\x63ommand_type"\x97\x04\n\x14\x43reateScalarFunction\x12\x14\n\x05pa [...]
+    b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\xb3\x01\n\x07\x43ommand\x12N\n\x0f\x63reate_function\x18\x01 \x01(\x0b\x32#.spark.connect.CreateScalarFunctionH\x00R\x0e\x63reateFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperationB\x0e\n\x0c\x63ommand_type"\x97\x04\n\x14\x43reateScalarFunction\x12\x14\n\x05parts\x18\x01 \x03(\tR\x05parts\x12P\n\x0 [...]
 )
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
@@ -45,18 +44,18 @@ if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
     _WRITEOPERATION_OPTIONSENTRY._options = None
     _WRITEOPERATION_OPTIONSENTRY._serialized_options = b"8\001"
-    _COMMAND._serialized_start = 139
-    _COMMAND._serialized_end = 318
-    _CREATESCALARFUNCTION._serialized_start = 321
-    _CREATESCALARFUNCTION._serialized_end = 856
-    _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_start = 694
-    _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_end = 833
-    _WRITEOPERATION._serialized_start = 859
-    _WRITEOPERATION._serialized_end = 1601
-    _WRITEOPERATION_OPTIONSENTRY._serialized_start = 1297
-    _WRITEOPERATION_OPTIONSENTRY._serialized_end = 1355
-    _WRITEOPERATION_BUCKETBY._serialized_start = 1357
-    _WRITEOPERATION_BUCKETBY._serialized_end = 1448
-    _WRITEOPERATION_SAVEMODE._serialized_start = 1451
-    _WRITEOPERATION_SAVEMODE._serialized_end = 1588
+    _COMMAND._serialized_start = 106
+    _COMMAND._serialized_end = 285
+    _CREATESCALARFUNCTION._serialized_start = 288
+    _CREATESCALARFUNCTION._serialized_end = 823
+    _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_start = 661
+    _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_end = 800
+    _WRITEOPERATION._serialized_start = 826
+    _WRITEOPERATION._serialized_end = 1568
+    _WRITEOPERATION_OPTIONSENTRY._serialized_start = 1264
+    _WRITEOPERATION_OPTIONSENTRY._serialized_end = 1322
+    _WRITEOPERATION_BUCKETBY._serialized_start = 1324
+    _WRITEOPERATION_BUCKETBY._serialized_end = 1415
+    _WRITEOPERATION_SAVEMODE._serialized_start = 1418
+    _WRITEOPERATION_SAVEMODE._serialized_end = 1555
 # @@protoc_insertion_point(module_scope)
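Every _serialized_start/_serialized_end offset above shrinks by exactly 33
bytes. That is the size of the dependency entry deleted from the serialized
FileDescriptorProto: one tag byte (\x1a, field 3 "dependency", length-delimited),
one length byte (\x1f = 31), and the 31-byte path "spark/connect/expressions.proto".
A quick check of the arithmetic (a sketch):

    // 2 framing bytes (tag 0x1a + length 0x1f) plus the 31-character path.
    val removed = 2 + "spark/connect/expressions.proto".length   // = 33
    assert(139 - 106 == removed)    // _COMMAND._serialized_start
    assert(1601 - 1568 == removed)  // _WRITEOPERATION._serialized_end
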

