Posted to commits@spark.apache.org by hv...@apache.org on 2022/11/18 18:35:00 UTC

[spark] branch master updated: [SPARK-41196][CONNECT] Homogenize the protobuf version across the Spark connect server to use the same major version

This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d1db4ca6815 [SPARK-41196][CONNECT] Homogenize the protobuf version across the Spark connect server to use the same major version
d1db4ca6815 is described below

commit d1db4ca681553a6ef9bc7bfea699e42279ce60b2
Author: Martin Grund <ma...@databricks.com>
AuthorDate: Fri Nov 18 14:34:45 2022 -0400

    [SPARK-41196][CONNECT] Homogenize the protobuf version across the Spark connect server to use the same major version
    
    ### What changes were proposed in this pull request?
    
    This patch homogenizes the protobuf versions of the Spark Connect server and the Python client so that both use the same major version.
    
    ### Why are the changes needed?
    Compatibility
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing UT.
    
    Closes #38693 from grundprinzip/proto-python.
    
    Lead-authored-by: Martin Grund <ma...@databricks.com>
    Co-authored-by: Martin Grund <gr...@gmail.com>
    Signed-off-by: Herman van Hovell <he...@databricks.com>
---
 connector/connect/src/main/buf.gen.yaml            |   2 +-
 dev/requirements.txt                               |   2 +-
 python/pyspark/sql/connect/proto/base_pb2.py       | 134 +++++++-
 python/pyspark/sql/connect/proto/commands_pb2.py   |  80 ++++-
 .../pyspark/sql/connect/proto/expressions_pb2.py   | 198 ++++++++++-
 python/pyspark/sql/connect/proto/relations_pb2.py  | 367 ++++++++++++++++++++-
 python/pyspark/sql/connect/proto/types_pb2.py      | 293 +++++++++++++++-
 7 files changed, 1059 insertions(+), 17 deletions(-)
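The regenerated _pb2.py files change shape because the two protoc plugin pins target different runtimes: the v3.20+ plugin emits modules that call the google.protobuf.internal.builder helper (which only exists on the 3.20+/4.x runtime), while the pinned v3.19.3 plugin emits the older reflection-based pattern that the 3.19.x runtime understands. A self-contained sketch of that reflection pattern, mirroring what the regenerated files below do (the Example message is illustrative, not part of this patch):

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_pool
    from google.protobuf import message as _message
    from google.protobuf import reflection as _reflection

    # Hand-build a FileDescriptorProto standing in for the serialized
    # descriptor blob that generated modules embed.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = "example.proto"
    file_proto.package = "example"
    msg = file_proto.message_type.add()
    msg.name = "Example"
    field = msg.field.add()
    field.name = "id"
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT64
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # The same calls the regenerated files make: register the file,
    # look up the message descriptor, build the class via reflection.
    DESCRIPTOR = descriptor_pool.Default().AddSerializedFile(
        file_proto.SerializeToString()
    )
    _EXAMPLE = DESCRIPTOR.message_types_by_name["Example"]
    Example = _reflection.GeneratedProtocolMessageType(
        "Example", (_message.Message,), {"DESCRIPTOR": _EXAMPLE}
    )

    e = Example(id=7)
    assert Example.FromString(e.SerializeToString()).id == 7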

diff --git a/connector/connect/src/main/buf.gen.yaml b/connector/connect/src/main/buf.gen.yaml
index 2a4b5df44dc..d74d08632fd 100644
--- a/connector/connect/src/main/buf.gen.yaml
+++ b/connector/connect/src/main/buf.gen.yaml
@@ -27,7 +27,7 @@ plugins:
   - remote: buf.build/protocolbuffers/plugins/ruby:v21.2.0-1
     out: gen/proto/ruby
    # Building the Python build and building the mypy interfaces.
-  - remote: buf.build/protocolbuffers/plugins/python:v3.20.0-1
+  - remote: buf.build/protocolbuffers/plugins/python:v3.19.3-1
     out: gen/proto/python
   - remote: buf.build/grpc/plugins/python:v1.47.0-1
     out: gen/proto/python
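With the plugin pinned, the Python stubs are regenerated from this config. A minimal helper sketch, assuming the buf CLI is installed and run from a Spark checkout (the project's actual dev workflow may differ):

    # Hypothetical regeneration helper, not part of this patch.
    import subprocess

    # buf reads buf.gen.yaml from the working directory and invokes
    # the remote plugins pinned above.
    subprocess.run(
        ["buf", "generate"],
        cwd="connector/connect/src/main",
        check=True,
    )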
diff --git a/dev/requirements.txt b/dev/requirements.txt
index 914c26b1fa1..d9497f63ad4 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -50,7 +50,7 @@ black==22.6.0
 
 # Spark Connect (required)
 grpcio==1.48.1
-protobuf==4.21.6
+protobuf==3.19.4
 
 # Spark Connect python proto generation plugin (optional)
 mypy-protobuf==3.3.0
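A mismatched protobuf runtime typically fails only at import time; a small guard sketch (illustrative, not part of this patch) can make the failure message explicit:

    # Illustrative guard: fail fast if the installed protobuf runtime's
    # major version drifts from the pin in dev/requirements.txt.
    import google.protobuf

    _major = int(google.protobuf.__version__.split(".")[0])
    if _major != 3:
        raise ImportError(
            "protobuf %s installed; these generated stubs target the "
            "3.x runtime" % google.protobuf.__version__
        )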
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 8f61cde151e..dac0f661318 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -18,9 +18,10 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: spark/connect/base.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 
 # @@protoc_insertion_point(imports)
@@ -38,8 +39,135 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
     b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"\xb5\x01\n\x07\x45xplain\x12\x45\n\x0c\x65xplain_mode\x18\x01 \x01(\x0e\x32".spark.connect.Explain. [...]
 )
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.base_pb2", globals())
+
+_PLAN = DESCRIPTOR.message_types_by_name["Plan"]
+_REQUEST = DESCRIPTOR.message_types_by_name["Request"]
+_REQUEST_USERCONTEXT = _REQUEST.nested_types_by_name["UserContext"]
+_RESPONSE = DESCRIPTOR.message_types_by_name["Response"]
+_RESPONSE_ARROWBATCH = _RESPONSE.nested_types_by_name["ArrowBatch"]
+_RESPONSE_JSONBATCH = _RESPONSE.nested_types_by_name["JSONBatch"]
+_RESPONSE_METRICS = _RESPONSE.nested_types_by_name["Metrics"]
+_RESPONSE_METRICS_METRICOBJECT = _RESPONSE_METRICS.nested_types_by_name["MetricObject"]
+_RESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY = (
+    _RESPONSE_METRICS_METRICOBJECT.nested_types_by_name["ExecutionMetricsEntry"]
+)
+_RESPONSE_METRICS_METRICVALUE = _RESPONSE_METRICS.nested_types_by_name["MetricValue"]
+_ANALYZERESPONSE = DESCRIPTOR.message_types_by_name["AnalyzeResponse"]
+Plan = _reflection.GeneratedProtocolMessageType(
+    "Plan",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _PLAN,
+        "__module__": "spark.connect.base_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Plan)
+    },
+)
+_sym_db.RegisterMessage(Plan)
+
+Request = _reflection.GeneratedProtocolMessageType(
+    "Request",
+    (_message.Message,),
+    {
+        "UserContext": _reflection.GeneratedProtocolMessageType(
+            "UserContext",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _REQUEST_USERCONTEXT,
+                "__module__": "spark.connect.base_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Request.UserContext)
+            },
+        ),
+        "DESCRIPTOR": _REQUEST,
+        "__module__": "spark.connect.base_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Request)
+    },
+)
+_sym_db.RegisterMessage(Request)
+_sym_db.RegisterMessage(Request.UserContext)
+
+Response = _reflection.GeneratedProtocolMessageType(
+    "Response",
+    (_message.Message,),
+    {
+        "ArrowBatch": _reflection.GeneratedProtocolMessageType(
+            "ArrowBatch",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _RESPONSE_ARROWBATCH,
+                "__module__": "spark.connect.base_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Response.ArrowBatch)
+            },
+        ),
+        "JSONBatch": _reflection.GeneratedProtocolMessageType(
+            "JSONBatch",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _RESPONSE_JSONBATCH,
+                "__module__": "spark.connect.base_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Response.JSONBatch)
+            },
+        ),
+        "Metrics": _reflection.GeneratedProtocolMessageType(
+            "Metrics",
+            (_message.Message,),
+            {
+                "MetricObject": _reflection.GeneratedProtocolMessageType(
+                    "MetricObject",
+                    (_message.Message,),
+                    {
+                        "ExecutionMetricsEntry": _reflection.GeneratedProtocolMessageType(
+                            "ExecutionMetricsEntry",
+                            (_message.Message,),
+                            {
+                                "DESCRIPTOR": _RESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY,
+                                "__module__": "spark.connect.base_pb2"
+                                # @@protoc_insertion_point(class_scope:spark.connect.Response.Metrics.MetricObject.ExecutionMetricsEntry)
+                            },
+                        ),
+                        "DESCRIPTOR": _RESPONSE_METRICS_METRICOBJECT,
+                        "__module__": "spark.connect.base_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Response.Metrics.MetricObject)
+                    },
+                ),
+                "MetricValue": _reflection.GeneratedProtocolMessageType(
+                    "MetricValue",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _RESPONSE_METRICS_METRICVALUE,
+                        "__module__": "spark.connect.base_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Response.Metrics.MetricValue)
+                    },
+                ),
+                "DESCRIPTOR": _RESPONSE_METRICS,
+                "__module__": "spark.connect.base_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Response.Metrics)
+            },
+        ),
+        "DESCRIPTOR": _RESPONSE,
+        "__module__": "spark.connect.base_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Response)
+    },
+)
+_sym_db.RegisterMessage(Response)
+_sym_db.RegisterMessage(Response.ArrowBatch)
+_sym_db.RegisterMessage(Response.JSONBatch)
+_sym_db.RegisterMessage(Response.Metrics)
+_sym_db.RegisterMessage(Response.Metrics.MetricObject)
+_sym_db.RegisterMessage(Response.Metrics.MetricObject.ExecutionMetricsEntry)
+_sym_db.RegisterMessage(Response.Metrics.MetricValue)
+
+AnalyzeResponse = _reflection.GeneratedProtocolMessageType(
+    "AnalyzeResponse",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _ANALYZERESPONSE,
+        "__module__": "spark.connect.base_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.AnalyzeResponse)
+    },
+)
+_sym_db.RegisterMessage(AnalyzeResponse)
+
+_SPARKCONNECTSERVICE = DESCRIPTOR.services_by_name["SparkConnectService"]
 if _descriptor._USE_C_DESCRIPTORS == False:
 
     DESCRIPTOR._options = None
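Once rebuilt this way, the classes behave like any other protobuf messages. A usage sketch, assuming a PySpark checkout with these modules importable:

    from pyspark.sql.connect.proto import base_pb2

    # Construct a Plan, select the 'root' arm of its op_type oneof,
    # and round-trip it through serialization.
    plan = base_pb2.Plan()
    plan.root.SetInParent()
    data = plan.SerializeToString()
    assert base_pb2.Plan.FromString(data).WhichOneof("op_type") == "root"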
diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py
index 11f53322ce7..e7069a4b9a7 100644
--- a/python/pyspark/sql/connect/proto/commands_pb2.py
+++ b/python/pyspark/sql/connect/proto/commands_pb2.py
@@ -18,9 +18,10 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: spark/connect/commands.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 
 # @@protoc_insertion_point(imports)
@@ -36,8 +37,81 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
     b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\x94\x02\n\x07\x43ommand\x12N\n\x0f\x63reate_function\x18\x01 \x01(\x0b\x32#.spark.connect.CreateScalarFunctionH\x00R\x0e\x63reateFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x18\x03 \x01(\x0b\x32).spark.connect.CreateDataFrameViewCommandH\x00R\x13\x63reateD [...]
 )
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.commands_pb2", globals())
+
+_COMMAND = DESCRIPTOR.message_types_by_name["Command"]
+_CREATESCALARFUNCTION = DESCRIPTOR.message_types_by_name["CreateScalarFunction"]
+_CREATEDATAFRAMEVIEWCOMMAND = DESCRIPTOR.message_types_by_name["CreateDataFrameViewCommand"]
+_WRITEOPERATION = DESCRIPTOR.message_types_by_name["WriteOperation"]
+_WRITEOPERATION_OPTIONSENTRY = _WRITEOPERATION.nested_types_by_name["OptionsEntry"]
+_WRITEOPERATION_BUCKETBY = _WRITEOPERATION.nested_types_by_name["BucketBy"]
+_CREATESCALARFUNCTION_FUNCTIONLANGUAGE = _CREATESCALARFUNCTION.enum_types_by_name[
+    "FunctionLanguage"
+]
+_WRITEOPERATION_SAVEMODE = _WRITEOPERATION.enum_types_by_name["SaveMode"]
+Command = _reflection.GeneratedProtocolMessageType(
+    "Command",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _COMMAND,
+        "__module__": "spark.connect.commands_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Command)
+    },
+)
+_sym_db.RegisterMessage(Command)
+
+CreateScalarFunction = _reflection.GeneratedProtocolMessageType(
+    "CreateScalarFunction",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _CREATESCALARFUNCTION,
+        "__module__": "spark.connect.commands_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.CreateScalarFunction)
+    },
+)
+_sym_db.RegisterMessage(CreateScalarFunction)
+
+CreateDataFrameViewCommand = _reflection.GeneratedProtocolMessageType(
+    "CreateDataFrameViewCommand",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _CREATEDATAFRAMEVIEWCOMMAND,
+        "__module__": "spark.connect.commands_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.CreateDataFrameViewCommand)
+    },
+)
+_sym_db.RegisterMessage(CreateDataFrameViewCommand)
+
+WriteOperation = _reflection.GeneratedProtocolMessageType(
+    "WriteOperation",
+    (_message.Message,),
+    {
+        "OptionsEntry": _reflection.GeneratedProtocolMessageType(
+            "OptionsEntry",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _WRITEOPERATION_OPTIONSENTRY,
+                "__module__": "spark.connect.commands_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation.OptionsEntry)
+            },
+        ),
+        "BucketBy": _reflection.GeneratedProtocolMessageType(
+            "BucketBy",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _WRITEOPERATION_BUCKETBY,
+                "__module__": "spark.connect.commands_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation.BucketBy)
+            },
+        ),
+        "DESCRIPTOR": _WRITEOPERATION,
+        "__module__": "spark.connect.commands_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation)
+    },
+)
+_sym_db.RegisterMessage(WriteOperation)
+_sym_db.RegisterMessage(WriteOperation.OptionsEntry)
+_sym_db.RegisterMessage(WriteOperation.BucketBy)
+
 if _descriptor._USE_C_DESCRIPTORS == False:
 
     DESCRIPTOR._options = None
diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py
index dca9d2cef47..c372df7d324 100644
--- a/python/pyspark/sql/connect/proto/expressions_pb2.py
+++ b/python/pyspark/sql/connect/proto/expressions_pb2.py
@@ -18,9 +18,10 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: spark/connect/expressions.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 
 # @@protoc_insertion_point(imports)
@@ -36,8 +37,199 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
     b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19spark/connect/types.proto\x1a\x19google/protobuf/any.proto"\xf0\x17\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFu [...]
 )
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.expressions_pb2", globals())
+
+_EXPRESSION = DESCRIPTOR.message_types_by_name["Expression"]
+_EXPRESSION_LITERAL = _EXPRESSION.nested_types_by_name["Literal"]
+_EXPRESSION_LITERAL_VARCHAR = _EXPRESSION_LITERAL.nested_types_by_name["VarChar"]
+_EXPRESSION_LITERAL_DECIMAL = _EXPRESSION_LITERAL.nested_types_by_name["Decimal"]
+_EXPRESSION_LITERAL_MAP = _EXPRESSION_LITERAL.nested_types_by_name["Map"]
+_EXPRESSION_LITERAL_MAP_KEYVALUE = _EXPRESSION_LITERAL_MAP.nested_types_by_name["KeyValue"]
+_EXPRESSION_LITERAL_INTERVALYEARTOMONTH = _EXPRESSION_LITERAL.nested_types_by_name[
+    "IntervalYearToMonth"
+]
+_EXPRESSION_LITERAL_INTERVALDAYTOSECOND = _EXPRESSION_LITERAL.nested_types_by_name[
+    "IntervalDayToSecond"
+]
+_EXPRESSION_LITERAL_STRUCT = _EXPRESSION_LITERAL.nested_types_by_name["Struct"]
+_EXPRESSION_LITERAL_LIST = _EXPRESSION_LITERAL.nested_types_by_name["List"]
+_EXPRESSION_LITERAL_USERDEFINED = _EXPRESSION_LITERAL.nested_types_by_name["UserDefined"]
+_EXPRESSION_UNRESOLVEDATTRIBUTE = _EXPRESSION.nested_types_by_name["UnresolvedAttribute"]
+_EXPRESSION_UNRESOLVEDFUNCTION = _EXPRESSION.nested_types_by_name["UnresolvedFunction"]
+_EXPRESSION_EXPRESSIONSTRING = _EXPRESSION.nested_types_by_name["ExpressionString"]
+_EXPRESSION_UNRESOLVEDSTAR = _EXPRESSION.nested_types_by_name["UnresolvedStar"]
+_EXPRESSION_QUALIFIEDATTRIBUTE = _EXPRESSION.nested_types_by_name["QualifiedAttribute"]
+_EXPRESSION_ALIAS = _EXPRESSION.nested_types_by_name["Alias"]
+Expression = _reflection.GeneratedProtocolMessageType(
+    "Expression",
+    (_message.Message,),
+    {
+        "Literal": _reflection.GeneratedProtocolMessageType(
+            "Literal",
+            (_message.Message,),
+            {
+                "VarChar": _reflection.GeneratedProtocolMessageType(
+                    "VarChar",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_VARCHAR,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.VarChar)
+                    },
+                ),
+                "Decimal": _reflection.GeneratedProtocolMessageType(
+                    "Decimal",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_DECIMAL,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Decimal)
+                    },
+                ),
+                "Map": _reflection.GeneratedProtocolMessageType(
+                    "Map",
+                    (_message.Message,),
+                    {
+                        "KeyValue": _reflection.GeneratedProtocolMessageType(
+                            "KeyValue",
+                            (_message.Message,),
+                            {
+                                "DESCRIPTOR": _EXPRESSION_LITERAL_MAP_KEYVALUE,
+                                "__module__": "spark.connect.expressions_pb2"
+                                # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Map.KeyValue)
+                            },
+                        ),
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_MAP,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Map)
+                    },
+                ),
+                "IntervalYearToMonth": _reflection.GeneratedProtocolMessageType(
+                    "IntervalYearToMonth",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_INTERVALYEARTOMONTH,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.IntervalYearToMonth)
+                    },
+                ),
+                "IntervalDayToSecond": _reflection.GeneratedProtocolMessageType(
+                    "IntervalDayToSecond",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_INTERVALDAYTOSECOND,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.IntervalDayToSecond)
+                    },
+                ),
+                "Struct": _reflection.GeneratedProtocolMessageType(
+                    "Struct",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_STRUCT,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Struct)
+                    },
+                ),
+                "List": _reflection.GeneratedProtocolMessageType(
+                    "List",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_LIST,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.List)
+                    },
+                ),
+                "UserDefined": _reflection.GeneratedProtocolMessageType(
+                    "UserDefined",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _EXPRESSION_LITERAL_USERDEFINED,
+                        "__module__": "spark.connect.expressions_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.UserDefined)
+                    },
+                ),
+                "DESCRIPTOR": _EXPRESSION_LITERAL,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal)
+            },
+        ),
+        "UnresolvedAttribute": _reflection.GeneratedProtocolMessageType(
+            "UnresolvedAttribute",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_UNRESOLVEDATTRIBUTE,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedAttribute)
+            },
+        ),
+        "UnresolvedFunction": _reflection.GeneratedProtocolMessageType(
+            "UnresolvedFunction",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_UNRESOLVEDFUNCTION,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedFunction)
+            },
+        ),
+        "ExpressionString": _reflection.GeneratedProtocolMessageType(
+            "ExpressionString",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_EXPRESSIONSTRING,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.ExpressionString)
+            },
+        ),
+        "UnresolvedStar": _reflection.GeneratedProtocolMessageType(
+            "UnresolvedStar",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_UNRESOLVEDSTAR,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedStar)
+            },
+        ),
+        "QualifiedAttribute": _reflection.GeneratedProtocolMessageType(
+            "QualifiedAttribute",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_QUALIFIEDATTRIBUTE,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.QualifiedAttribute)
+            },
+        ),
+        "Alias": _reflection.GeneratedProtocolMessageType(
+            "Alias",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _EXPRESSION_ALIAS,
+                "__module__": "spark.connect.expressions_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Expression.Alias)
+            },
+        ),
+        "DESCRIPTOR": _EXPRESSION,
+        "__module__": "spark.connect.expressions_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Expression)
+    },
+)
+_sym_db.RegisterMessage(Expression)
+_sym_db.RegisterMessage(Expression.Literal)
+_sym_db.RegisterMessage(Expression.Literal.VarChar)
+_sym_db.RegisterMessage(Expression.Literal.Decimal)
+_sym_db.RegisterMessage(Expression.Literal.Map)
+_sym_db.RegisterMessage(Expression.Literal.Map.KeyValue)
+_sym_db.RegisterMessage(Expression.Literal.IntervalYearToMonth)
+_sym_db.RegisterMessage(Expression.Literal.IntervalDayToSecond)
+_sym_db.RegisterMessage(Expression.Literal.Struct)
+_sym_db.RegisterMessage(Expression.Literal.List)
+_sym_db.RegisterMessage(Expression.Literal.UserDefined)
+_sym_db.RegisterMessage(Expression.UnresolvedAttribute)
+_sym_db.RegisterMessage(Expression.UnresolvedFunction)
+_sym_db.RegisterMessage(Expression.ExpressionString)
+_sym_db.RegisterMessage(Expression.UnresolvedStar)
+_sym_db.RegisterMessage(Expression.QualifiedAttribute)
+_sym_db.RegisterMessage(Expression.Alias)
+
 if _descriptor._USE_C_DESCRIPTORS == False:
 
     DESCRIPTOR._options = None
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py
index 9bc5e75ea64..c586e4bdeda 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.py
+++ b/python/pyspark/sql/connect/proto/relations_pb2.py
@@ -18,9 +18,10 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: spark/connect/relations.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 
 # @@protoc_insertion_point(imports)
@@ -35,8 +36,368 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
     b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xa6\x0b\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0 [...]
 )
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.relations_pb2", globals())
+
+_RELATION = DESCRIPTOR.message_types_by_name["Relation"]
+_UNKNOWN = DESCRIPTOR.message_types_by_name["Unknown"]
+_RELATIONCOMMON = DESCRIPTOR.message_types_by_name["RelationCommon"]
+_SQL = DESCRIPTOR.message_types_by_name["SQL"]
+_READ = DESCRIPTOR.message_types_by_name["Read"]
+_READ_NAMEDTABLE = _READ.nested_types_by_name["NamedTable"]
+_READ_DATASOURCE = _READ.nested_types_by_name["DataSource"]
+_READ_DATASOURCE_OPTIONSENTRY = _READ_DATASOURCE.nested_types_by_name["OptionsEntry"]
+_PROJECT = DESCRIPTOR.message_types_by_name["Project"]
+_FILTER = DESCRIPTOR.message_types_by_name["Filter"]
+_JOIN = DESCRIPTOR.message_types_by_name["Join"]
+_SETOPERATION = DESCRIPTOR.message_types_by_name["SetOperation"]
+_LIMIT = DESCRIPTOR.message_types_by_name["Limit"]
+_OFFSET = DESCRIPTOR.message_types_by_name["Offset"]
+_AGGREGATE = DESCRIPTOR.message_types_by_name["Aggregate"]
+_SORT = DESCRIPTOR.message_types_by_name["Sort"]
+_SORT_SORTFIELD = _SORT.nested_types_by_name["SortField"]
+_DEDUPLICATE = DESCRIPTOR.message_types_by_name["Deduplicate"]
+_LOCALRELATION = DESCRIPTOR.message_types_by_name["LocalRelation"]
+_SAMPLE = DESCRIPTOR.message_types_by_name["Sample"]
+_RANGE = DESCRIPTOR.message_types_by_name["Range"]
+_SUBQUERYALIAS = DESCRIPTOR.message_types_by_name["SubqueryAlias"]
+_REPARTITION = DESCRIPTOR.message_types_by_name["Repartition"]
+_SHOWSTRING = DESCRIPTOR.message_types_by_name["ShowString"]
+_STATSUMMARY = DESCRIPTOR.message_types_by_name["StatSummary"]
+_STATCROSSTAB = DESCRIPTOR.message_types_by_name["StatCrosstab"]
+_NAFILL = DESCRIPTOR.message_types_by_name["NAFill"]
+_RENAMECOLUMNSBYSAMELENGTHNAMES = DESCRIPTOR.message_types_by_name["RenameColumnsBySameLengthNames"]
+_RENAMECOLUMNSBYNAMETONAMEMAP = DESCRIPTOR.message_types_by_name["RenameColumnsByNameToNameMap"]
+_RENAMECOLUMNSBYNAMETONAMEMAP_RENAMECOLUMNSMAPENTRY = (
+    _RENAMECOLUMNSBYNAMETONAMEMAP.nested_types_by_name["RenameColumnsMapEntry"]
+)
+_JOIN_JOINTYPE = _JOIN.enum_types_by_name["JoinType"]
+_SETOPERATION_SETOPTYPE = _SETOPERATION.enum_types_by_name["SetOpType"]
+_SORT_SORTDIRECTION = _SORT.enum_types_by_name["SortDirection"]
+_SORT_SORTNULLS = _SORT.enum_types_by_name["SortNulls"]
+Relation = _reflection.GeneratedProtocolMessageType(
+    "Relation",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _RELATION,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Relation)
+    },
+)
+_sym_db.RegisterMessage(Relation)
+
+Unknown = _reflection.GeneratedProtocolMessageType(
+    "Unknown",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _UNKNOWN,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Unknown)
+    },
+)
+_sym_db.RegisterMessage(Unknown)
+
+RelationCommon = _reflection.GeneratedProtocolMessageType(
+    "RelationCommon",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _RELATIONCOMMON,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.RelationCommon)
+    },
+)
+_sym_db.RegisterMessage(RelationCommon)
+
+SQL = _reflection.GeneratedProtocolMessageType(
+    "SQL",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _SQL,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.SQL)
+    },
+)
+_sym_db.RegisterMessage(SQL)
+
+Read = _reflection.GeneratedProtocolMessageType(
+    "Read",
+    (_message.Message,),
+    {
+        "NamedTable": _reflection.GeneratedProtocolMessageType(
+            "NamedTable",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _READ_NAMEDTABLE,
+                "__module__": "spark.connect.relations_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Read.NamedTable)
+            },
+        ),
+        "DataSource": _reflection.GeneratedProtocolMessageType(
+            "DataSource",
+            (_message.Message,),
+            {
+                "OptionsEntry": _reflection.GeneratedProtocolMessageType(
+                    "OptionsEntry",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _READ_DATASOURCE_OPTIONSENTRY,
+                        "__module__": "spark.connect.relations_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.Read.DataSource.OptionsEntry)
+                    },
+                ),
+                "DESCRIPTOR": _READ_DATASOURCE,
+                "__module__": "spark.connect.relations_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Read.DataSource)
+            },
+        ),
+        "DESCRIPTOR": _READ,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Read)
+    },
+)
+_sym_db.RegisterMessage(Read)
+_sym_db.RegisterMessage(Read.NamedTable)
+_sym_db.RegisterMessage(Read.DataSource)
+_sym_db.RegisterMessage(Read.DataSource.OptionsEntry)
+
+Project = _reflection.GeneratedProtocolMessageType(
+    "Project",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _PROJECT,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Project)
+    },
+)
+_sym_db.RegisterMessage(Project)
+
+Filter = _reflection.GeneratedProtocolMessageType(
+    "Filter",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _FILTER,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Filter)
+    },
+)
+_sym_db.RegisterMessage(Filter)
+
+Join = _reflection.GeneratedProtocolMessageType(
+    "Join",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _JOIN,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Join)
+    },
+)
+_sym_db.RegisterMessage(Join)
+
+SetOperation = _reflection.GeneratedProtocolMessageType(
+    "SetOperation",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _SETOPERATION,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.SetOperation)
+    },
+)
+_sym_db.RegisterMessage(SetOperation)
+
+Limit = _reflection.GeneratedProtocolMessageType(
+    "Limit",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _LIMIT,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Limit)
+    },
+)
+_sym_db.RegisterMessage(Limit)
+
+Offset = _reflection.GeneratedProtocolMessageType(
+    "Offset",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _OFFSET,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Offset)
+    },
+)
+_sym_db.RegisterMessage(Offset)
+
+Aggregate = _reflection.GeneratedProtocolMessageType(
+    "Aggregate",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _AGGREGATE,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Aggregate)
+    },
+)
+_sym_db.RegisterMessage(Aggregate)
+
+Sort = _reflection.GeneratedProtocolMessageType(
+    "Sort",
+    (_message.Message,),
+    {
+        "SortField": _reflection.GeneratedProtocolMessageType(
+            "SortField",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _SORT_SORTFIELD,
+                "__module__": "spark.connect.relations_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.Sort.SortField)
+            },
+        ),
+        "DESCRIPTOR": _SORT,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Sort)
+    },
+)
+_sym_db.RegisterMessage(Sort)
+_sym_db.RegisterMessage(Sort.SortField)
+
+Deduplicate = _reflection.GeneratedProtocolMessageType(
+    "Deduplicate",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _DEDUPLICATE,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Deduplicate)
+    },
+)
+_sym_db.RegisterMessage(Deduplicate)
+
+LocalRelation = _reflection.GeneratedProtocolMessageType(
+    "LocalRelation",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _LOCALRELATION,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.LocalRelation)
+    },
+)
+_sym_db.RegisterMessage(LocalRelation)
+
+Sample = _reflection.GeneratedProtocolMessageType(
+    "Sample",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _SAMPLE,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Sample)
+    },
+)
+_sym_db.RegisterMessage(Sample)
+
+Range = _reflection.GeneratedProtocolMessageType(
+    "Range",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _RANGE,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Range)
+    },
+)
+_sym_db.RegisterMessage(Range)
+
+SubqueryAlias = _reflection.GeneratedProtocolMessageType(
+    "SubqueryAlias",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _SUBQUERYALIAS,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.SubqueryAlias)
+    },
+)
+_sym_db.RegisterMessage(SubqueryAlias)
+
+Repartition = _reflection.GeneratedProtocolMessageType(
+    "Repartition",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _REPARTITION,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.Repartition)
+    },
+)
+_sym_db.RegisterMessage(Repartition)
+
+ShowString = _reflection.GeneratedProtocolMessageType(
+    "ShowString",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _SHOWSTRING,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.ShowString)
+    },
+)
+_sym_db.RegisterMessage(ShowString)
+
+StatSummary = _reflection.GeneratedProtocolMessageType(
+    "StatSummary",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _STATSUMMARY,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.StatSummary)
+    },
+)
+_sym_db.RegisterMessage(StatSummary)
+
+StatCrosstab = _reflection.GeneratedProtocolMessageType(
+    "StatCrosstab",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _STATCROSSTAB,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.StatCrosstab)
+    },
+)
+_sym_db.RegisterMessage(StatCrosstab)
+
+NAFill = _reflection.GeneratedProtocolMessageType(
+    "NAFill",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _NAFILL,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.NAFill)
+    },
+)
+_sym_db.RegisterMessage(NAFill)
+
+RenameColumnsBySameLengthNames = _reflection.GeneratedProtocolMessageType(
+    "RenameColumnsBySameLengthNames",
+    (_message.Message,),
+    {
+        "DESCRIPTOR": _RENAMECOLUMNSBYSAMELENGTHNAMES,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.RenameColumnsBySameLengthNames)
+    },
+)
+_sym_db.RegisterMessage(RenameColumnsBySameLengthNames)
+
+RenameColumnsByNameToNameMap = _reflection.GeneratedProtocolMessageType(
+    "RenameColumnsByNameToNameMap",
+    (_message.Message,),
+    {
+        "RenameColumnsMapEntry": _reflection.GeneratedProtocolMessageType(
+            "RenameColumnsMapEntry",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _RENAMECOLUMNSBYNAMETONAMEMAP_RENAMECOLUMNSMAPENTRY,
+                "__module__": "spark.connect.relations_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.RenameColumnsByNameToNameMap.RenameColumnsMapEntry)
+            },
+        ),
+        "DESCRIPTOR": _RENAMECOLUMNSBYNAMETONAMEMAP,
+        "__module__": "spark.connect.relations_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.RenameColumnsByNameToNameMap)
+    },
+)
+_sym_db.RegisterMessage(RenameColumnsByNameToNameMap)
+_sym_db.RegisterMessage(RenameColumnsByNameToNameMap.RenameColumnsMapEntry)
+
 if _descriptor._USE_C_DESCRIPTORS == False:
 
     DESCRIPTOR._options = None
diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py
index e9044437142..3507b03602c 100644
--- a/python/pyspark/sql/connect/proto/types_pb2.py
+++ b/python/pyspark/sql/connect/proto/types_pb2.py
@@ -18,9 +18,10 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: spark/connect/types.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 
 # @@protoc_insertion_point(imports)
@@ -32,8 +33,294 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
     b'\n\x19spark/connect/types.proto\x12\rspark.connect"\xc1\x1c\n\x08\x44\x61taType\x12\x35\n\x04\x62ool\x18\x01 \x01(\x0b\x32\x1f.spark.connect.DataType.BooleanH\x00R\x04\x62ool\x12,\n\x02i8\x18\x02 \x01(\x0b\x32\x1a.spark.connect.DataType.I8H\x00R\x02i8\x12/\n\x03i16\x18\x03 \x01(\x0b\x32\x1b.spark.connect.DataType.I16H\x00R\x03i16\x12/\n\x03i32\x18\x05 \x01(\x0b\x32\x1b.spark.connect.DataType.I32H\x00R\x03i32\x12/\n\x03i64\x18\x07 \x01(\x0b\x32\x1b.spark.connect.DataType.I64H\x00R\x [...]
 )
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.types_pb2", globals())
+
+_DATATYPE = DESCRIPTOR.message_types_by_name["DataType"]
+_DATATYPE_BOOLEAN = _DATATYPE.nested_types_by_name["Boolean"]
+_DATATYPE_I8 = _DATATYPE.nested_types_by_name["I8"]
+_DATATYPE_I16 = _DATATYPE.nested_types_by_name["I16"]
+_DATATYPE_I32 = _DATATYPE.nested_types_by_name["I32"]
+_DATATYPE_I64 = _DATATYPE.nested_types_by_name["I64"]
+_DATATYPE_FP32 = _DATATYPE.nested_types_by_name["FP32"]
+_DATATYPE_FP64 = _DATATYPE.nested_types_by_name["FP64"]
+_DATATYPE_STRING = _DATATYPE.nested_types_by_name["String"]
+_DATATYPE_BINARY = _DATATYPE.nested_types_by_name["Binary"]
+_DATATYPE_TIMESTAMP = _DATATYPE.nested_types_by_name["Timestamp"]
+_DATATYPE_DATE = _DATATYPE.nested_types_by_name["Date"]
+_DATATYPE_TIME = _DATATYPE.nested_types_by_name["Time"]
+_DATATYPE_TIMESTAMPTZ = _DATATYPE.nested_types_by_name["TimestampTZ"]
+_DATATYPE_INTERVALYEAR = _DATATYPE.nested_types_by_name["IntervalYear"]
+_DATATYPE_INTERVALDAY = _DATATYPE.nested_types_by_name["IntervalDay"]
+_DATATYPE_UUID = _DATATYPE.nested_types_by_name["UUID"]
+_DATATYPE_FIXEDCHAR = _DATATYPE.nested_types_by_name["FixedChar"]
+_DATATYPE_VARCHAR = _DATATYPE.nested_types_by_name["VarChar"]
+_DATATYPE_FIXEDBINARY = _DATATYPE.nested_types_by_name["FixedBinary"]
+_DATATYPE_DECIMAL = _DATATYPE.nested_types_by_name["Decimal"]
+_DATATYPE_STRUCTFIELD = _DATATYPE.nested_types_by_name["StructField"]
+_DATATYPE_STRUCTFIELD_METADATAENTRY = _DATATYPE_STRUCTFIELD.nested_types_by_name["MetadataEntry"]
+_DATATYPE_STRUCT = _DATATYPE.nested_types_by_name["Struct"]
+_DATATYPE_LIST = _DATATYPE.nested_types_by_name["List"]
+_DATATYPE_MAP = _DATATYPE.nested_types_by_name["Map"]
+DataType = _reflection.GeneratedProtocolMessageType(
+    "DataType",
+    (_message.Message,),
+    {
+        "Boolean": _reflection.GeneratedProtocolMessageType(
+            "Boolean",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_BOOLEAN,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Boolean)
+            },
+        ),
+        "I8": _reflection.GeneratedProtocolMessageType(
+            "I8",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_I8,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.I8)
+            },
+        ),
+        "I16": _reflection.GeneratedProtocolMessageType(
+            "I16",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_I16,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.I16)
+            },
+        ),
+        "I32": _reflection.GeneratedProtocolMessageType(
+            "I32",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_I32,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.I32)
+            },
+        ),
+        "I64": _reflection.GeneratedProtocolMessageType(
+            "I64",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_I64,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.I64)
+            },
+        ),
+        "FP32": _reflection.GeneratedProtocolMessageType(
+            "FP32",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_FP32,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.FP32)
+            },
+        ),
+        "FP64": _reflection.GeneratedProtocolMessageType(
+            "FP64",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_FP64,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.FP64)
+            },
+        ),
+        "String": _reflection.GeneratedProtocolMessageType(
+            "String",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_STRING,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.String)
+            },
+        ),
+        "Binary": _reflection.GeneratedProtocolMessageType(
+            "Binary",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_BINARY,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Binary)
+            },
+        ),
+        "Timestamp": _reflection.GeneratedProtocolMessageType(
+            "Timestamp",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_TIMESTAMP,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Timestamp)
+            },
+        ),
+        "Date": _reflection.GeneratedProtocolMessageType(
+            "Date",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_DATE,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Date)
+            },
+        ),
+        "Time": _reflection.GeneratedProtocolMessageType(
+            "Time",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_TIME,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Time)
+            },
+        ),
+        "TimestampTZ": _reflection.GeneratedProtocolMessageType(
+            "TimestampTZ",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_TIMESTAMPTZ,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.TimestampTZ)
+            },
+        ),
+        "IntervalYear": _reflection.GeneratedProtocolMessageType(
+            "IntervalYear",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_INTERVALYEAR,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.IntervalYear)
+            },
+        ),
+        "IntervalDay": _reflection.GeneratedProtocolMessageType(
+            "IntervalDay",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_INTERVALDAY,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.IntervalDay)
+            },
+        ),
+        "UUID": _reflection.GeneratedProtocolMessageType(
+            "UUID",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_UUID,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.UUID)
+            },
+        ),
+        "FixedChar": _reflection.GeneratedProtocolMessageType(
+            "FixedChar",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_FIXEDCHAR,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.FixedChar)
+            },
+        ),
+        "VarChar": _reflection.GeneratedProtocolMessageType(
+            "VarChar",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_VARCHAR,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.VarChar)
+            },
+        ),
+        "FixedBinary": _reflection.GeneratedProtocolMessageType(
+            "FixedBinary",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_FIXEDBINARY,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.FixedBinary)
+            },
+        ),
+        "Decimal": _reflection.GeneratedProtocolMessageType(
+            "Decimal",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_DECIMAL,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Decimal)
+            },
+        ),
+        "StructField": _reflection.GeneratedProtocolMessageType(
+            "StructField",
+            (_message.Message,),
+            {
+                "MetadataEntry": _reflection.GeneratedProtocolMessageType(
+                    "MetadataEntry",
+                    (_message.Message,),
+                    {
+                        "DESCRIPTOR": _DATATYPE_STRUCTFIELD_METADATAENTRY,
+                        "__module__": "spark.connect.types_pb2"
+                        # @@protoc_insertion_point(class_scope:spark.connect.DataType.StructField.MetadataEntry)
+                    },
+                ),
+                "DESCRIPTOR": _DATATYPE_STRUCTFIELD,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.StructField)
+            },
+        ),
+        "Struct": _reflection.GeneratedProtocolMessageType(
+            "Struct",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_STRUCT,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Struct)
+            },
+        ),
+        "List": _reflection.GeneratedProtocolMessageType(
+            "List",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_LIST,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.List)
+            },
+        ),
+        "Map": _reflection.GeneratedProtocolMessageType(
+            "Map",
+            (_message.Message,),
+            {
+                "DESCRIPTOR": _DATATYPE_MAP,
+                "__module__": "spark.connect.types_pb2"
+                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Map)
+            },
+        ),
+        "DESCRIPTOR": _DATATYPE,
+        "__module__": "spark.connect.types_pb2"
+        # @@protoc_insertion_point(class_scope:spark.connect.DataType)
+    },
+)
+_sym_db.RegisterMessage(DataType)
+_sym_db.RegisterMessage(DataType.Boolean)
+_sym_db.RegisterMessage(DataType.I8)
+_sym_db.RegisterMessage(DataType.I16)
+_sym_db.RegisterMessage(DataType.I32)
+_sym_db.RegisterMessage(DataType.I64)
+_sym_db.RegisterMessage(DataType.FP32)
+_sym_db.RegisterMessage(DataType.FP64)
+_sym_db.RegisterMessage(DataType.String)
+_sym_db.RegisterMessage(DataType.Binary)
+_sym_db.RegisterMessage(DataType.Timestamp)
+_sym_db.RegisterMessage(DataType.Date)
+_sym_db.RegisterMessage(DataType.Time)
+_sym_db.RegisterMessage(DataType.TimestampTZ)
+_sym_db.RegisterMessage(DataType.IntervalYear)
+_sym_db.RegisterMessage(DataType.IntervalDay)
+_sym_db.RegisterMessage(DataType.UUID)
+_sym_db.RegisterMessage(DataType.FixedChar)
+_sym_db.RegisterMessage(DataType.VarChar)
+_sym_db.RegisterMessage(DataType.FixedBinary)
+_sym_db.RegisterMessage(DataType.Decimal)
+_sym_db.RegisterMessage(DataType.StructField)
+_sym_db.RegisterMessage(DataType.StructField.MetadataEntry)
+_sym_db.RegisterMessage(DataType.Struct)
+_sym_db.RegisterMessage(DataType.List)
+_sym_db.RegisterMessage(DataType.Map)
+
 if _descriptor._USE_C_DESCRIPTORS == False:
 
     DESCRIPTOR._options = None

