Posted to commits@spark.apache.org by we...@apache.org on 2020/06/03 18:12:03 UTC

[spark] branch branch-3.0 updated: Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"

This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 08198fa  Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"
08198fa is described below

commit 08198fa10aa45368b90aad2e2eb9eb216461032d
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Thu Jun 4 02:01:49 2020 +0800

    Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"
    
    This reverts commit 1b7ae62bf443f20c94daa6ea5cabbd18f96b7919.
---
 .../apache/spark/sql/catalyst/util/DateFormatter.scala |  8 +-------
 .../spark/sql/catalyst/util/TimestampFormatter.scala   |  8 +-------
 .../src/test/resources/sql-tests/inputs/datetime.sql   |  4 ----
 .../resources/sql-tests/results/ansi/datetime.sql.out  | 18 +-----------------
 .../sql-tests/results/datetime-legacy.sql.out          | 18 +-----------------
 .../test/resources/sql-tests/results/datetime.sql.out  | 18 +-----------------
 6 files changed, 5 insertions(+), 69 deletions(-)
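
(Context, not part of the commit: the doc comment removed in the hunks below explains that the GB locale was chosen because its week starts on Monday, matching Spark 2.x, while the US week starts on Sunday. A minimal JVM-level sketch of that difference, independent of Spark and using only java.time, follows; the object and variable names are illustrative only, and the removed golden results further down ("03" on a Wednesday under the GB locale) line up with this behavior.)

    import java.time.LocalDate
    import java.time.format.DateTimeFormatter
    import java.time.temporal.WeekFields
    import java.util.Locale

    object LocaleWeekDemo {
      def main(args: Array[String]): Unit = {
        // 2020-01-01 is a Wednesday (the date used by the removed tests below).
        val date = LocalDate.of(2020, 1, 1)

        // The first day-of-week differs by locale: Monday for en-GB, Sunday for en-US.
        println(WeekFields.of(Locale.UK).getFirstDayOfWeek) // MONDAY
        println(WeekFields.of(Locale.US).getFirstDayOfWeek) // SUNDAY

        // The localized day-of-week number ('e') shifts accordingly:
        // Wednesday is 03 when the week starts on Monday, 04 when it starts on Sunday.
        println(DateTimeFormatter.ofPattern("ee", Locale.UK).format(date)) // 03
        println(DateTimeFormatter.ofPattern("ee", Locale.US).format(date)) // 04
      }
    }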

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
index e178164..b3347eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
@@ -118,13 +118,7 @@ class LegacySimpleDateFormatter(pattern: String, locale: Locale) extends LegacyD
 object DateFormatter {
   import LegacyDateFormats._
 
-  /**
-   * Before Spark 3.0, the first day-of-week is always Monday. Since Spark 3.0, it depends on the
-   * locale.
-   * We pick GB as the default locale instead of US, to be compatible with Spark 2.x, as US locale
-   * uses Sunday as the first day-of-week. See SPARK-31879.
-   */
-  val defaultLocale: Locale = new Locale("en", "GB")
+  val defaultLocale: Locale = Locale.US
 
   val defaultPattern: String = "yyyy-MM-dd"
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
index 0873b85..e8866d7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
@@ -257,13 +257,7 @@ object LegacyDateFormats extends Enumeration {
 object TimestampFormatter {
   import LegacyDateFormats._
 
-  /**
-   * Before Spark 3.0, the first day-of-week is always Monday. Since Spark 3.0, it depends on the
-   * locale.
-   * We pick GB as the default locale instead of US, to be compatible with Spark 2.x, as US locale
-   * uses Sunday as the first day-of-week. See SPARK-31879.
-   */
-  val defaultLocale: Locale = new Locale("en", "GB")
+  val defaultLocale: Locale = Locale.US
 
   def defaultPattern(): String = s"${DateFormatter.defaultPattern} HH:mm:ss"
 
diff --git a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
index e955c78..4eefa0f 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
@@ -154,7 +154,3 @@ select from_csv('26/October/2015', 'date Date', map('dateFormat', 'dd/MMMMM/yyyy
 select from_unixtime(1, 'yyyyyyyyyyy-MM-dd');
 select date_format(timestamp '2018-11-17 13:33:33', 'yyyyyyyyyy-MM-dd HH:mm:ss');
 select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd');
-
--- SPARK-31879: the first day of week
-select date_format('2020-01-01', 'YYYY-MM-dd uu');
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu');
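
(Context, not part of the commit: the two queries removed above can be replayed in spark-shell against a Spark 3.0 build; `spark` below is the SparkSession the shell provides. The GB-locale expectations for these queries appear in the golden result files removed further down; with the US default restored by this revert, day-of-week numbering starts from Sunday instead of Monday.)

    // Replaying the removed week-related queries; the `spark` SparkSession is
    // assumed to be in scope, as in spark-shell.
    spark.sql("select date_format('2020-01-01', 'YYYY-MM-dd uu')").show(false)
    spark.sql("select date_format('2020-01-01', 'YYYY-MM-dd uuuu')").show(false)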
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
index c3d10b0..43fe0a6 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 114
+-- Number of queries: 112
 
 
 -- !query
@@ -965,19 +965,3 @@ struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
 You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 Wednesday
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index ac50c5b..71b1064 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 114
+-- Number of queries: 112
 
 
 -- !query
@@ -920,19 +920,3 @@ select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd')
 struct<date_format(CAST(DATE '2018-11-17' AS TIMESTAMP), yyyyyyyyyyy-MM-dd):string>
 -- !query output
 00000002018-11-17
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 0003
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index fd037db..9b1c847 100755
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 114
+-- Number of queries: 112
 
 
 -- !query
@@ -937,19 +937,3 @@ struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
 You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 Wednesday

