Posted to commits@spark.apache.org by we...@apache.org on 2020/06/03 18:03:14 UTC

[spark] branch master updated: Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"

This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e61d0de  Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"
e61d0de is described below

commit e61d0de11f0c06a6b7f2dadcee833a2bbf6aa2f3
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Thu Jun 4 01:54:22 2020 +0800

    Revert "[SPARK-31879][SQL] Using GB as default Locale for datetime formatters"
    
    This reverts commit c59f51bcc207725b8cbc4201df9367f874f5915c.
---
 .../apache/spark/sql/catalyst/util/DateFormatter.scala |  8 +-------
 .../spark/sql/catalyst/util/TimestampFormatter.scala   |  8 +-------
 .../src/test/resources/sql-tests/inputs/datetime.sql   |  4 ----
 .../resources/sql-tests/results/ansi/datetime.sql.out  | 18 +-----------------
 .../sql-tests/results/datetime-legacy.sql.out          | 18 +-----------------
 .../test/resources/sql-tests/results/datetime.sql.out  | 18 +-----------------
 6 files changed, 5 insertions(+), 69 deletions(-)
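
Note: the default locale matters here because week-based fields in java.time are locale-sensitive; en_GB treats Monday as the first day-of-week while en_US uses Sunday, which is what the removed 'uu'/'uuuu' test cases below exercised (see the doc comment deleted from DateFormatter.scala). The following is a minimal, standalone REPL-style sketch using plain java.time rather than Spark's DateFormatter, added here purely for illustration:

    import java.time.LocalDate
    import java.time.format.DateTimeFormatter
    import java.time.temporal.WeekFields
    import java.util.Locale

    val date = LocalDate.of(2020, 1, 1)  // 2020-01-01 is a Wednesday

    // First day-of-week is locale-dependent: Monday for en_GB, Sunday for en_US.
    WeekFields.of(new Locale("en", "GB")).getFirstDayOfWeek  // MONDAY
    WeekFields.of(Locale.US).getFirstDayOfWeek               // SUNDAY

    // The localized day-of-week number ('e') shifts with the first day-of-week,
    // so the same date formats differently under the two locales.
    date.format(DateTimeFormatter.ofPattern("ee", new Locale("en", "GB")))  // "03"
    date.format(DateTimeFormatter.ofPattern("ee", Locale.US))               // "04"

With this revert, the default locale goes back to Locale.US and the GB-specific expectations in the generated .sql.out files are dropped.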

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
index 5bb92c6..6d225ad 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
@@ -117,13 +117,7 @@ class LegacySimpleDateFormatter(pattern: String, locale: Locale) extends LegacyD
 object DateFormatter {
   import LegacyDateFormats._
 
-  /**
-   * Before Spark 3.0, the first day-of-week is always Monday. Since Spark 3.0, it depends on the
-   * locale.
-   * We pick GB as the default locale instead of US, to be compatible with Spark 2.x, as US locale
-   * uses Sunday as the first day-of-week. See SPARK-31879.
-   */
-  val defaultLocale: Locale = new Locale("en", "GB")
+  val defaultLocale: Locale = Locale.US
 
   val defaultPattern: String = "yyyy-MM-dd"
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
index 63a4c2e..97ecc43 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
@@ -278,13 +278,7 @@ object LegacyDateFormats extends Enumeration {
 object TimestampFormatter {
   import LegacyDateFormats._
 
-  /**
-   * Before Spark 3.0, the first day-of-week is always Monday. Since Spark 3.0, it depends on the
-   * locale.
-   * We pick GB as the default locale instead of US, to be compatible with Spark 2.x, as US locale
-   * uses Sunday as the first day-of-week. See SPARK-31879.
-   */
-  val defaultLocale: Locale = new Locale("en", "GB")
+  val defaultLocale: Locale = Locale.US
 
   def defaultPattern(): String = s"${DateFormatter.defaultPattern} HH:mm:ss"
 
diff --git a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
index 5636e0b..9bd936f 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
@@ -164,7 +164,3 @@ select from_csv('26/October/2015', 'date Date', map('dateFormat', 'dd/MMMMM/yyyy
 select from_unixtime(1, 'yyyyyyyyyyy-MM-dd');
 select date_format(timestamp '2018-11-17 13:33:33', 'yyyyyyyyyy-MM-dd HH:mm:ss');
 select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd');
-
--- SPARK-31879: the first day of week
-select date_format('2020-01-01', 'YYYY-MM-dd uu');
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu');
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
index 3803460..ca04b00 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 121
+-- Number of queries: 119
 
 
 -- !query
@@ -1025,19 +1025,3 @@ struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
 You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 Wednesday
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index 99dd14d..fe932d3 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 121
+-- Number of queries: 119
 
 
 -- !query
@@ -980,19 +980,3 @@ select date_format(date '2018-11-17', 'yyyyyyyyyyy-MM-dd')
 struct<date_format(CAST(DATE '2018-11-17' AS TIMESTAMP), yyyyyyyyyyy-MM-dd):string>
 -- !query output
 00000002018-11-17
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 0003
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index c8c568c..06a41da 100755
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 121
+-- Number of queries: 119
 
 
 -- !query
@@ -997,19 +997,3 @@ struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
 You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyyyyyyyyy-MM-dd' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uu):string>
--- !query output
-2020-01-01 03
-
-
--- !query
-select date_format('2020-01-01', 'YYYY-MM-dd uuuu')
--- !query schema
-struct<date_format(CAST(2020-01-01 AS TIMESTAMP), YYYY-MM-dd uuuu):string>
--- !query output
-2020-01-01 Wednesday

