Posted to commits@spark.apache.org by ge...@apache.org on 2021/07/20 13:32:16 UTC

[spark] branch branch-3.2 updated: [SPARK-36046][SQL][FOLLOWUP] Implement prettyName for MakeTimestampNTZ and MakeTimestampLTZ

This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 0f6cf8a  [SPARK-36046][SQL][FOLLOWUP] Implement prettyName for MakeTimestampNTZ and MakeTimestampLTZ
0f6cf8a is described below

commit 0f6cf8abe34416ea4e9db5bd11f9f022c9fd7b7d
Author: gengjiaan <ge...@360.cn>
AuthorDate: Tue Jul 20 21:31:00 2021 +0800

    [SPARK-36046][SQL][FOLLOWUP] Implement prettyName for MakeTimestampNTZ and MakeTimestampLTZ
    
    ### What changes were proposed in this pull request?
    This PR follows https://github.com/apache/spark/pull/33299 and implements `prettyName` for `MakeTimestampNTZ` and `MakeTimestampLTZ`, based on the discussion at
    https://github.com/apache/spark/pull/33299/files#r668423810
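
    A minimal sketch of the behavior this follow-up targets (a spark-shell session; the exact output shown is illustrative, not taken from the patch):

    ```scala
    // With prettyName overridden, the auto-generated column name for the function
    // call is expected to use the registered SQL name (make_timestamp_ntz) rather
    // than a class-derived name, without relying on the registry's alias wrapping.
    val df = spark.sql("SELECT make_timestamp_ntz(2021, 7, 20, 21, 31, 0)")
    df.printSchema()
    // root
    //  |-- make_timestamp_ntz(2021, 7, 20, 21, 31, 0): timestamp_ntz (nullable = true)
    ```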
    
    ### Why are the changes needed?
    This PR fixes the incorrect alias behavior for these functions.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    The modifications are transparent to users.
    
    ### How was this patch tested?
    Jenkins test.
    
    Closes #33430 from beliefer/SPARK-36046-followup.
    
    Authored-by: gengjiaan <ge...@360.cn>
    Signed-off-by: Gengliang Wang <ge...@apache.org>
    (cherry picked from commit 033a5731b44723fd7434c5ee0a021d3787a621ef)
    Signed-off-by: Gengliang Wang <ge...@apache.org>
---
 .../org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala     | 4 ++--
 .../apache/spark/sql/catalyst/expressions/datetimeExpressions.scala   | 4 ++++
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 234da76..5fce4b6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -555,8 +555,8 @@ object FunctionRegistry {
     expression[SessionWindow]("session_window"),
     expression[MakeDate]("make_date"),
     expression[MakeTimestamp]("make_timestamp"),
-    expression[MakeTimestampNTZ]("make_timestamp_ntz", true),
-    expression[MakeTimestampLTZ]("make_timestamp_ltz", true),
+    expression[MakeTimestampNTZ]("make_timestamp_ntz"),
+    expression[MakeTimestampLTZ]("make_timestamp_ltz"),
     expression[MakeInterval]("make_interval"),
     expression[MakeDTInterval]("make_dt_interval"),
     expression[MakeYMInterval]("make_ym_interval"),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 1146ba7..bc2e33b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2403,6 +2403,8 @@ case class MakeTimestampNTZ(
       MakeTimestamp(year, month, day, hour, min, sec, dataType = TimestampNTZType))
   }
 
+  override def prettyName: String = "make_timestamp_ntz"
+
   override def exprsReplaced: Seq[Expression] = Seq(year, month, day, hour, min, sec)
 
   override protected def withNewChildInternal(newChild: Expression): Expression =
@@ -2473,6 +2475,8 @@ case class MakeTimestampLTZ(
       MakeTimestamp(year, month, day, hour, min, sec, Some(timezone), dataType = TimestampType))
   }
 
+  override def prettyName: String = "make_timestamp_ltz"
+
   override def exprsReplaced: Seq[Expression] = Seq(year, month, day, hour, min, sec)
 
   override protected def withNewChildInternal(newChild: Expression): Expression =

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org