You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2021/07/21 17:58:24 UTC
[spark] branch branch-3.2 updated: [SPARK-36208][SQL][3.2]
SparkScriptTransformation should support ANSI interval types
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.2 by this push:
new 468165a [SPARK-36208][SQL][3.2] SparkScriptTransformation should support ANSI interval types
468165a is described below
commit 468165ae52aa788e3fa59f385225b90c616bfa0f
Author: Kousuke Saruta <sa...@oss.nttdata.com>
AuthorDate: Wed Jul 21 20:54:18 2021 +0300
[SPARK-36208][SQL][3.2] SparkScriptTransformation should support ANSI interval types
### What changes were proposed in this pull request?
This PR changes `BaseScriptTransformationExec` for `SparkScriptTransformationExec` to support ANSI interval types.
### Why are the changes needed?
`SparkScriptTransformationExec` supports `CalendarIntervalType`, so it's better to support ANSI interval types as well.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
New test.
Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
(cherry picked from commit f56c7b71ff27e6f5379f3699c2dcb5f79a0ae791)
Signed-off-by: Max Gekk <max.gekk@gmail.com>
Closes #33463 from MaxGekk/sarutak_script-transformation-interval-3.2.
Authored-by: Kousuke Saruta <sa...@oss.nttdata.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
---
.../execution/BaseScriptTransformationExec.scala | 8 ++++++++
.../execution/BaseScriptTransformationSuite.scala | 21 +++++++++++++++++++++
2 files changed, 29 insertions(+)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala
index 7835981..e249cd6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala
@@ -223,6 +223,14 @@ trait BaseScriptTransformationExec extends UnaryExecNode {
case CalendarIntervalType => wrapperConvertException(
data => IntervalUtils.stringToInterval(UTF8String.fromString(data)),
converter)
+ case YearMonthIntervalType(start, end) => wrapperConvertException(
+ data => IntervalUtils.monthsToPeriod(
+ IntervalUtils.castStringToYMInterval(UTF8String.fromString(data), start, end)),
+ converter)
+ case DayTimeIntervalType(start, end) => wrapperConvertException(
+ data => IntervalUtils.microsToDuration(
+ IntervalUtils.castStringToDTInterval(UTF8String.fromString(data), start, end)),
+ converter)
case _: ArrayType | _: MapType | _: StructType =>
val complexTypeFactory = JsonToStructs(attr.dataType,
ioschema.outputSerdeProps.toMap, Literal(null), Some(conf.sessionLocalTimeZone))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
index c845dd81..9d8fcda 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
@@ -633,6 +633,27 @@ abstract class BaseScriptTransformationSuite extends SparkPlanTest with SQLTestU
}
}
}
+
+ test("SPARK-36208: TRANSFORM should support ANSI interval (no serde)") {
+ assume(TestUtils.testCommandAvailable("python"))
+ withTempView("v") {
+ val df = Seq(
+ (Period.of(1, 2, 0), Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4))
+ ).toDF("ym", "dt")
+
+ checkAnswer(
+ df,
+ (child: SparkPlan) => createScriptTransformationExec(
+ script = "cat",
+ output = Seq(
+ AttributeReference("ym", YearMonthIntervalType())(),
+ AttributeReference("dt", DayTimeIntervalType())()),
+ child = child,
+ ioschema = defaultIOSchema
+ ),
+ df.select($"ym", $"dt").collect())
+ }
+ }
}
case class ExceptionInjectingOperator(child: SparkPlan) extends UnaryExecNode {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org