You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2021/02/27 13:46:23 UTC
[spark] branch branch-3.1 updated: [SPARK-34392][SQL] Support
ZoneOffset +h:mm in DateTimeUtils.getZoneId
This is an automated email from the ASF dual-hosted git repository.
srowen pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new daeae50 [SPARK-34392][SQL] Support ZoneOffset +h:mm in DateTimeUtils.getZoneId
daeae50 is described below
commit daeae5095a6202bfc7afa19cafde6c4b86a3613c
Author: ShiKai Wang <ws...@gmail.com>
AuthorDate: Fri Feb 26 11:03:20 2021 -0600
[SPARK-34392][SQL] Support ZoneOffset +h:mm in DateTimeUtils.getZoneId
### What changes were proposed in this pull request?
To support +8:00 in Spark 3 when executing SQL
`select to_utc_timestamp("2020-02-07 16:00:00", "GMT+8:00")`
### Why are the changes needed?
The +8:00 format is supported in PostgreSQL, Hive, and Presto, but not in Spark 3
https://issues.apache.org/jira/browse/SPARK-34392
### Does this PR introduce _any_ user-facing change?
no
### How was this patch tested?
unit test
Closes #31624 from Karl-WangSK/zone.
Lead-authored-by: ShiKai Wang <ws...@gmail.com>
Co-authored-by: Karl-WangSK <sh...@linkflowtech.com>
Signed-off-by: Sean Owen <sr...@gmail.com>
---
.../org/apache/spark/sql/catalyst/util/DateTimeUtils.scala | 5 ++++-
.../apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 13 +++++++++++++
.../scala/org/apache/spark/sql/internal/SQLConfSuite.scala | 5 ++---
3 files changed, 19 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 87cf3c9..89cb67c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -50,7 +50,10 @@ object DateTimeUtils {
val TIMEZONE_OPTION = "timeZone"
- def getZoneId(timeZoneId: String): ZoneId = ZoneId.of(timeZoneId, ZoneId.SHORT_IDS)
+ def getZoneId(timeZoneId: String): ZoneId = {
+ // To support the (+|-)h:mm format because it was supported before Spark 3.0.
+ ZoneId.of(timeZoneId.replaceFirst("(\\+|\\-)(\\d):", "$10$2:"), ZoneId.SHORT_IDS)
+ }
def getTimeZone(timeZoneId: String): TimeZone = TimeZone.getTimeZone(getZoneId(timeZoneId))
/**
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 3d841f3..fb2d511 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -471,6 +471,13 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
test("2011-12-25 09:00:00.123456", JST.getId, "2011-12-25 18:00:00.123456")
test("2011-12-25 09:00:00.123456", LA.getId, "2011-12-25 01:00:00.123456")
test("2011-12-25 09:00:00.123456", "Asia/Shanghai", "2011-12-25 17:00:00.123456")
+ test("2011-12-25 09:00:00.123456", "-7", "2011-12-25 02:00:00.123456")
+ test("2011-12-25 09:00:00.123456", "+8:00", "2011-12-25 17:00:00.123456")
+ test("2011-12-25 09:00:00.123456", "+8:00:00", "2011-12-25 17:00:00.123456")
+ test("2011-12-25 09:00:00.123456", "+0800", "2011-12-25 17:00:00.123456")
+ test("2011-12-25 09:00:00.123456", "-071020", "2011-12-25 01:49:40.123456")
+ test("2011-12-25 09:00:00.123456", "-07:10:20", "2011-12-25 01:49:40.123456")
+
}
}
@@ -496,6 +503,12 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
test("2011-12-25 18:00:00.123456", JST.getId, "2011-12-25 09:00:00.123456")
test("2011-12-25 01:00:00.123456", LA.getId, "2011-12-25 09:00:00.123456")
test("2011-12-25 17:00:00.123456", "Asia/Shanghai", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 02:00:00.123456", "-7", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 17:00:00.123456", "+8:00", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 17:00:00.123456", "+8:00:00", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 17:00:00.123456", "+0800", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 01:49:40.123456", "-071020", "2011-12-25 09:00:00.123456")
+ test("2011-12-25 01:49:40.123456", "-07:10:20", "2011-12-25 09:00:00.123456")
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 1ea2d4f..f5d1dc2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -414,13 +414,12 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "America/Chicago")
assert(sql(s"set ${SQLConf.SESSION_LOCAL_TIMEZONE.key}").head().getString(1) ===
"America/Chicago")
+ spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "GMT+8:00")
+ assert(sql(s"set ${SQLConf.SESSION_LOCAL_TIMEZONE.key}").head().getString(1) === "GMT+8:00")
intercept[IllegalArgumentException] {
spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "pst")
}
- intercept[IllegalArgumentException] {
- spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "GMT+8:00")
- }
val e = intercept[IllegalArgumentException] {
spark.conf.set(SQLConf.SESSION_LOCAL_TIMEZONE.key, "Asia/shanghai")
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org