You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2018/07/05 08:10:40 UTC
spark git commit: [SPARK-24673][SQL] scala sql function
from_utc_timestamp second argument could be Column instead of String
Repository: spark
Updated Branches:
refs/heads/master f997be0c3 -> 4be9f0c02
[SPARK-24673][SQL] scala sql function from_utc_timestamp second argument could be Column instead of String
## What changes were proposed in this pull request?
Add an overloaded version to `from_utc_timestamp` and `to_utc_timestamp` having second argument as a `Column` instead of `String`.
## How was this patch tested?
Unit testing, especially adding two tests to org.apache.spark.sql.DateFunctionsSuite.scala
Author: Antonio Murgia <an...@agilelab.it>
Author: Antonio Murgia <an...@studio.unibo.it>
Closes #21693 from tmnd1991/feature/SPARK-24673.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4be9f0c0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4be9f0c0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4be9f0c0
Branch: refs/heads/master
Commit: 4be9f0c028cebb0d2975e93a6ebc56337cd2c585
Parents: f997be0
Author: Antonio Murgia <an...@agilelab.it>
Authored: Thu Jul 5 16:10:34 2018 +0800
Committer: hyukjinkwon <gu...@apache.org>
Committed: Thu Jul 5 16:10:34 2018 +0800
----------------------------------------------------------------------
.../scala/org/apache/spark/sql/functions.scala | 22 ++++++++++++
.../apache/spark/sql/DateFunctionsSuite.scala | 38 ++++++++++++++++++--
2 files changed, 58 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/4be9f0c0/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 614f65f..f2627e6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -2935,6 +2935,17 @@ object functions {
}
/**
+ * Given a timestamp like '2017-07-14 02:40:00.0', interprets it as a time in UTC, and renders
+ * that time as a timestamp in the given time zone. For example, 'GMT+1' would yield
+ * '2017-07-14 03:40:00.0'.
+ * @group datetime_funcs
+ * @since 2.4.0
+ */
+ def from_utc_timestamp(ts: Column, tz: Column): Column = withExpr {
+ FromUTCTimestamp(ts.expr, tz.expr)
+ }
+
+ /**
* Given a timestamp like '2017-07-14 02:40:00.0', interprets it as a time in the given time
* zone, and renders that time as a timestamp in UTC. For example, 'GMT+1' would yield
* '2017-07-14 01:40:00.0'.
@@ -2946,6 +2957,17 @@ object functions {
}
/**
+ * Given a timestamp like '2017-07-14 02:40:00.0', interprets it as a time in the given time
+ * zone, and renders that time as a timestamp in UTC. For example, 'GMT+1' would yield
+ * '2017-07-14 01:40:00.0'.
+ * @group datetime_funcs
+ * @since 2.4.0
+ */
+ def to_utc_timestamp(ts: Column, tz: Column): Column = withExpr {
+ ToUTCTimestamp(ts.expr, tz.expr)
+ }
+
+ /**
* Bucketize rows into one or more time windows given a timestamp specifying column. Window
* starts are inclusive but the window ends are exclusive, e.g. 12:05 will be in the window
* [12:05,12:10) but not in [12:00,12:05). Windows can support microsecond precision. Windows in
http://git-wip-us.apache.org/repos/asf/spark/blob/4be9f0c0/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 237412a..3af80b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -663,7 +663,7 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
checkAnswer(df.selectExpr("datediff(a, d)"), Seq(Row(1), Row(1)))
}
- test("from_utc_timestamp") {
+ test("from_utc_timestamp with literal zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00")
@@ -680,7 +680,24 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
}
- test("to_utc_timestamp") {
+ test("from_utc_timestamp with column zone") {
+ val df = Seq(
+ (Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00", "CET"),
+ (Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00", "PST")
+ ).toDF("a", "b", "c")
+ checkAnswer(
+ df.select(from_utc_timestamp(col("a"), col("c"))),
+ Seq(
+ Row(Timestamp.valueOf("2015-07-24 02:00:00")),
+ Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
+ checkAnswer(
+ df.select(from_utc_timestamp(col("b"), col("c"))),
+ Seq(
+ Row(Timestamp.valueOf("2015-07-24 02:00:00")),
+ Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
+ }
+
+ test("to_utc_timestamp with literal zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00")
@@ -697,6 +714,23 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
Row(Timestamp.valueOf("2015-07-25 07:00:00"))))
}
+ test("to_utc_timestamp with column zone") {
+ val df = Seq(
+ (Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00", "PST"),
+ (Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00", "CET")
+ ).toDF("a", "b", "c")
+ checkAnswer(
+ df.select(to_utc_timestamp(col("a"), col("c"))),
+ Seq(
+ Row(Timestamp.valueOf("2015-07-24 07:00:00")),
+ Row(Timestamp.valueOf("2015-07-24 22:00:00"))))
+ checkAnswer(
+ df.select(to_utc_timestamp(col("b"), col("c"))),
+ Seq(
+ Row(Timestamp.valueOf("2015-07-24 07:00:00")),
+ Row(Timestamp.valueOf("2015-07-24 22:00:00"))))
+ }
+
test("SPARK-23715: to/from_utc_timestamp can retain the previous behavior") {
withSQLConf(SQLConf.REJECT_TIMEZONE_IN_STRING.key -> "false") {
checkAnswer(
----------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org