You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2023/03/21 08:27:28 UTC
[spark] branch branch-3.4 updated: [SPARK-42876][SQL] DataType's physicalDataType should be private[sql]
This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push:
new 8cffa5c9c9f [SPARK-42876][SQL] DataType's physicalDataType should be private[sql]
8cffa5c9c9f is described below
commit 8cffa5c9c9f83d6c0ba2e6d490c2e658e089303a
Author: Rui Wang <ru...@databricks.com>
AuthorDate: Tue Mar 21 16:26:47 2023 +0800
[SPARK-42876][SQL] DataType's physicalDataType should be private[sql]
### What changes were proposed in this pull request?
`physicalDataType` should not be a public API; it should be `private[sql]`.
### Why are the changes needed?
This limits the API surface so that unnecessary APIs are not exposed publicly.
### Does this PR introduce _any_ user-facing change?
No, since we have not released Spark 3.4.0 yet.
### How was this patch tested?
N/A
Closes #40499 from amaliujia/change_scope_of_physical_data_type.
Authored-by: Rui Wang <ru...@databricks.com>
Signed-off-by: Wenchen Fan <we...@databricks.com>
(cherry picked from commit c9a530e38e7f4ff3a491245c1d3ecaa1755c87ad)
Signed-off-by: Wenchen Fan <we...@databricks.com>
---
sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/BinaryType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/BooleanType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala | 2 +-
.../main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/CharType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala | 2 +-
.../main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/DecimalType.scala | 3 ++-
.../src/main/scala/org/apache/spark/sql/types/DoubleType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/IntegerType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala | 2 +-
sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/StringType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/StructType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/TimestampType.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/types/VarcharType.scala | 2 +-
.../main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala | 2 +-
23 files changed, 24 insertions(+), 23 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
index 3e5f447a762..9665385f046 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala
@@ -91,7 +91,7 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT
*/
override def defaultSize: Int = 1 * elementType.defaultSize
- override def physicalDataType: PhysicalDataType =
+ private[sql] override def physicalDataType: PhysicalDataType =
PhysicalArrayType(elementType, containsNull)
override def simpleString: String = s"array<${elementType.simpleString}>"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
index d2998f533de..cba437dc68f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BinaryType.scala
@@ -45,7 +45,7 @@ class BinaryType private() extends AtomicType {
*/
override def defaultSize: Int = 100
- override def physicalDataType: PhysicalDataType = PhysicalBinaryType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalBinaryType
private[spark] override def asNullable: BinaryType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
index d8766e95e20..ba707dc4548 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/BooleanType.scala
@@ -42,7 +42,7 @@ class BooleanType private() extends AtomicType {
*/
override def defaultSize: Int = 1
- override def physicalDataType: PhysicalDataType = PhysicalBooleanType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalBooleanType
private[spark] override def asNullable: BooleanType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
index 7c361fc78e2..91b385b0bea 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ByteType.scala
@@ -45,7 +45,7 @@ class ByteType private() extends IntegralType {
*/
override def defaultSize: Int = 1
- override def physicalDataType: PhysicalDataType = PhysicalByteType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalByteType
override def simpleString: String = "tinyint"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
index 6073aacb03e..7f1c51ef23d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CalendarIntervalType.scala
@@ -38,7 +38,7 @@ class CalendarIntervalType private() extends DataType {
override def defaultSize: Int = 16
- override def physicalDataType: PhysicalDataType = PhysicalCalendarIntervalType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalCalendarIntervalType
override def typeName: String = "interval"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CharType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CharType.scala
index 6bc6d39f143..9a8727fb412 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CharType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/CharType.scala
@@ -33,7 +33,7 @@ case class CharType(length: Int) extends AtomicType {
private[sql] val ordering = implicitly[Ordering[InternalType]]
override def defaultSize: Int = length
- override def physicalDataType: PhysicalDataType = PhysicalStringType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalStringType
override def typeName: String = s"char($length)"
override def toString: String = s"CharType($length)"
private[spark] override def asNullable: CharType = this
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index 08d6f312066..13a7b03bc61 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -119,7 +119,7 @@ abstract class DataType extends AbstractDataType {
override private[sql] def acceptsType(other: DataType): Boolean = sameType(other)
- def physicalDataType: PhysicalDataType = UninitializedPhysicalType
+ private[sql] def physicalDataType: PhysicalDataType = UninitializedPhysicalType
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
index 0a794266acd..a38be782eab 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DateType.scala
@@ -47,7 +47,7 @@ class DateType private() extends DatetimeType {
*/
override def defaultSize: Int = 4
- override def physicalDataType: PhysicalDataType = PhysicalIntegerType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalIntegerType
private[spark] override def asNullable: DateType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
index 802c8a76637..b8444e0b737 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
@@ -61,7 +61,7 @@ case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AnsiInt
*/
override def defaultSize: Int = 8
- override def physicalDataType: PhysicalDataType = PhysicalLongType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalLongType
private[spark] override def asNullable: DayTimeIntervalType = this
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 2813771ce48..18710fc47da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -111,7 +111,8 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
*/
override def defaultSize: Int = if (precision <= Decimal.MAX_LONG_DIGITS) 8 else 16
- override def physicalDataType: PhysicalDataType = PhysicalDecimalType(precision, scale)
+ private[sql] override def physicalDataType: PhysicalDataType =
+ PhysicalDecimalType(precision, scale)
override def simpleString: String = s"decimal($precision,$scale)"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
index cef0681e88d..b6beeae1a70 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala
@@ -50,7 +50,7 @@ class DoubleType private() extends FractionalType {
*/
override def defaultSize: Int = 8
- override def physicalDataType: PhysicalDataType = PhysicalDoubleType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalDoubleType
private[spark] override def asNullable: DoubleType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
index 2e3992546d0..9ecbec1db75 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
@@ -50,7 +50,7 @@ class FloatType private() extends FractionalType {
*/
override def defaultSize: Int = 4
- override def physicalDataType: PhysicalDataType = PhysicalFloatType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalFloatType
private[spark] override def asNullable: FloatType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
index d58a4b63554..4985a6d5629 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/IntegerType.scala
@@ -45,7 +45,7 @@ class IntegerType private() extends IntegralType {
*/
override def defaultSize: Int = 4
- override def physicalDataType: PhysicalDataType = PhysicalIntegerType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalIntegerType
override def simpleString: String = "int"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
index be0560657c7..a8e8072156c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/LongType.scala
@@ -45,7 +45,7 @@ class LongType private() extends IntegralType {
*/
override def defaultSize: Int = 8
- override def physicalDataType: PhysicalDataType = PhysicalLongType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalLongType
override def simpleString: String = "bigint"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
index df7c18edc8a..48a8e77250a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala
@@ -68,7 +68,7 @@ case class MapType(
*/
override def defaultSize: Int = 1 * (keyType.defaultSize + valueType.defaultSize)
- override def physicalDataType: PhysicalDataType =
+ private[sql] override def physicalDataType: PhysicalDataType =
PhysicalMapType(keyType, valueType, valueContainsNull)
override def simpleString: String = s"map<${keyType.simpleString},${valueType.simpleString}>"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
index 171c9a6a67d..e5e8c4b6a7c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala
@@ -32,7 +32,7 @@ class NullType private() extends DataType {
// Defined with a private constructor so the companion object is the only possible instantiation.
override def defaultSize: Int = 1
- override def physicalDataType: PhysicalDataType = PhysicalNullType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalNullType
private[spark] override def asNullable: NullType = this
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
index 3d40610c168..f0eb01bc287 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ShortType.scala
@@ -45,7 +45,7 @@ class ShortType private() extends IntegralType {
*/
override def defaultSize: Int = 2
- override def physicalDataType: PhysicalDataType = PhysicalShortType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalShortType
override def simpleString: String = "smallint"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
index 9ab40d3d89e..2fdb834d10e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala
@@ -43,7 +43,7 @@ class StringType private() extends AtomicType {
*/
override def defaultSize: Int = 20
- override def physicalDataType: PhysicalDataType = PhysicalStringType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalStringType
private[spark] override def asNullable: StringType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index a9c3829a721..9ef3c4d60fd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -431,7 +431,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
*/
override def defaultSize: Int = fields.map(_.dataType.defaultSize).sum
- override def physicalDataType: PhysicalDataType = PhysicalStructType(fields)
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalStructType(fields)
override def simpleString: String = {
val fieldTypes = fields.view.map(field => s"${field.name}:${field.dataType.simpleString}").toSeq
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
index a554a0bcfa3..5b43a6e2323 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
@@ -48,7 +48,7 @@ class TimestampNTZType private() extends DatetimeType {
*/
override def defaultSize: Int = 8
- override def physicalDataType: PhysicalDataType = PhysicalLongType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalLongType
override def typeName: String = "timestamp_ntz"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
index b3a45275f2f..2683fad5a05 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampType.scala
@@ -49,7 +49,7 @@ class TimestampType private() extends DatetimeType {
*/
override def defaultSize: Int = 8
- override def physicalDataType: PhysicalDataType = PhysicalLongType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalLongType
private[spark] override def asNullable: TimestampType = this
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/VarcharType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/VarcharType.scala
index eab9be096ff..b6dadfce089 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/VarcharType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/VarcharType.scala
@@ -28,7 +28,7 @@ case class VarcharType(length: Int) extends AtomicType {
require(length >= 0, "The length of varchar type cannot be negative.")
private[sql] type InternalType = UTF8String
- override def physicalDataType: PhysicalDataType = PhysicalStringType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalStringType
@transient private[sql] lazy val tag = typeTag[InternalType]
private[sql] val ordering = implicitly[Ordering[InternalType]]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
index 5ed3b5574ef..969d0a52a88 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
@@ -59,7 +59,7 @@ case class YearMonthIntervalType(startField: Byte, endField: Byte) extends AnsiI
*/
override def defaultSize: Int = 4
- override def physicalDataType: PhysicalDataType = PhysicalIntegerType
+ private[sql] override def physicalDataType: PhysicalDataType = PhysicalIntegerType
private[spark] override def asNullable: YearMonthIntervalType = this
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org