You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2020/04/29 07:21:13 UTC
[spark] branch branch-3.0 updated:
[SPARK-31557][SQL][TESTS][FOLLOWUP] Check rebasing in all legacy formatters
This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new b4e63ac [SPARK-31557][SQL][TESTS][FOLLOWUP] Check rebasing in all legacy formatters
b4e63ac is described below
commit b4e63acc14fcdd48af0d0edfe44f482c901f9cf9
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Wed Apr 29 07:19:34 2020 +0000
[SPARK-31557][SQL][TESTS][FOLLOWUP] Check rebasing in all legacy formatters
### What changes were proposed in this pull request?
- Check all available legacy formats in the tests added by https://github.com/apache/spark/pull/28345
- Check dates rebasing in legacy parsers for only one direction: either days -> string or string -> days.
### Why are the changes needed?
Round-trip tests can hide issues in dates rebasing. For example, if we remove rebasing from legacy parsers (from `parse()` and `format()`), the tests will still pass.
### Does this PR introduce any user-facing change?
No
### How was this patch tested?
By running `DateFormatterSuite`.
Closes #28398 from MaxGekk/test-rebasing-in-legacy-date-formatter.
Authored-by: Max Gekk <ma...@gmail.com>
Signed-off-by: Wenchen Fan <we...@databricks.com>
(cherry picked from commit 73eac7565d7cd185d12a46637703ebff73649e40)
Signed-off-by: Wenchen Fan <we...@databricks.com>
---
.../apache/spark/sql/util/DateFormatterSuite.scala | 98 ++++++++++++++--------
1 file changed, 64 insertions(+), 34 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala
index 2df1d49..5e2b6a7 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/DateFormatterSuite.scala
@@ -50,23 +50,29 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
test("roundtrip date -> days -> date") {
LegacyBehaviorPolicy.values.foreach { parserPolicy =>
withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> parserPolicy.toString) {
- Seq(
- "0050-01-01",
- "0953-02-02",
- "1423-03-08",
- "1582-10-15",
- "1969-12-31",
- "1972-08-25",
- "1975-09-26",
- "2018-12-12",
- "2038-01-01",
- "5010-11-17").foreach { date =>
- DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
- withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
- val formatter = DateFormatter(getZoneId(timeZone))
- val days = formatter.parse(date)
- val formatted = formatter.format(days)
- assert(date === formatted)
+ LegacyDateFormats.values.foreach { legacyFormat =>
+ Seq(
+ "0050-01-01",
+ "0953-02-02",
+ "1423-03-08",
+ "1582-10-15",
+ "1969-12-31",
+ "1972-08-25",
+ "1975-09-26",
+ "2018-12-12",
+ "2038-01-01",
+ "5010-11-17").foreach { date =>
+ DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
+ withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
+ val formatter = DateFormatter(
+ DateFormatter.defaultPattern,
+ getZoneId(timeZone),
+ DateFormatter.defaultLocale,
+ legacyFormat)
+ val days = formatter.parse(date)
+ val formatted = formatter.format(days)
+ assert(date === formatted)
+ }
}
}
}
@@ -77,23 +83,29 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
test("roundtrip days -> date -> days") {
LegacyBehaviorPolicy.values.foreach { parserPolicy =>
withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> parserPolicy.toString) {
- Seq(
- -701265,
- -371419,
- -199722,
- -1,
- 0,
- 967,
- 2094,
- 17877,
- 24837,
- 1110657).foreach { days =>
- DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
- withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
- val formatter = DateFormatter(getZoneId(timeZone))
- val date = formatter.format(days)
- val parsed = formatter.parse(date)
- assert(days === parsed)
+ LegacyDateFormats.values.foreach { legacyFormat =>
+ Seq(
+ -701265,
+ -371419,
+ -199722,
+ -1,
+ 0,
+ 967,
+ 2094,
+ 17877,
+ 24837,
+ 1110657).foreach { days =>
+ DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
+ withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
+ val formatter = DateFormatter(
+ DateFormatter.defaultPattern,
+ getZoneId(timeZone),
+ DateFormatter.defaultLocale,
+ legacyFormat)
+ val date = formatter.format(days)
+ val parsed = formatter.parse(date)
+ assert(days === parsed)
+ }
}
}
}
@@ -146,4 +158,22 @@ class DateFormatterSuite extends SparkFunSuite with SQLHelper {
assert(formatter2.parse("BC 1234-02-22") === localDateToDays(LocalDate.of(-1233, 2, 22)))
assert(formatter2.parse("AD 1234-02-22") === localDateToDays(LocalDate.of(1234, 2, 22)))
}
+
+ test("SPARK-31557: rebasing in legacy formatters/parsers") {
+ withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> LegacyBehaviorPolicy.LEGACY.toString) {
+ LegacyDateFormats.values.foreach { legacyFormat =>
+ DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone =>
+ withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) {
+ val formatter = DateFormatter(
+ DateFormatter.defaultPattern,
+ getZoneId(timeZone),
+ DateFormatter.defaultLocale,
+ legacyFormat)
+ assert(LocalDate.ofEpochDay(formatter.parse("1000-01-01")) === LocalDate.of(1000, 1, 1))
+ assert(formatter.format(localDateToDays(LocalDate.of(1000, 1, 1))) === "1000-01-01")
+ }
+ }
+ }
+ }
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org