You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2023/02/16 00:29:18 UTC

[spark] branch branch-3.4 updated: [SPARK-42455][SQL] Rename JDBC option inferTimestampNTZType as preferTimestampNTZ

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new d3a57df00fc [SPARK-42455][SQL] Rename JDBC option inferTimestampNTZType as preferTimestampNTZ
d3a57df00fc is described below

commit d3a57df00fcf5b088315c14cb405b4397a745bc1
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Thu Feb 16 09:28:35 2023 +0900

    [SPARK-42455][SQL] Rename JDBC option inferTimestampNTZType as preferTimestampNTZ
    
    Similar to https://github.com/apache/spark/pull/37327, this PR renames the JDBC data source option `inferTimestampNTZType` as `preferTimestampNTZ`
    
    It is simpler and more straightforward. Also, it is consistent with the CSV data source option introduced in https://github.com/apache/spark/pull/37327.
    
    No, the TimestampNTZ project is not released yet.
    
    UT
    
    Closes #40042 from gengliangwang/inferNTZOption.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
    (cherry picked from commit 8194522225240a2192b9132858d0a324c0e94eb2)
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 docs/sql-data-sources-jdbc.md                                       | 2 +-
 .../apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala   | 6 +++---
 .../org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala   | 2 +-
 .../org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 2 +-
 sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala   | 4 ++--
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/docs/sql-data-sources-jdbc.md b/docs/sql-data-sources-jdbc.md
index 1ce411db190..ef11a3a77dd 100644
--- a/docs/sql-data-sources-jdbc.md
+++ b/docs/sql-data-sources-jdbc.md
@@ -365,7 +365,7 @@ logging into the data sources.
     <td>read/write</td>
   </tr>
   <tr>
-    <td><code>inferTimestampNTZType</code></td>
+    <td><code>preferTimestampNTZ</code></td>
     <td>false</td>
     <td>
       When the option is set to <code>true</code>, all timestamps are inferred as TIMESTAMP WITHOUT TIME ZONE.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
index 916ed99303b..148cd9e9335 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
@@ -234,9 +234,9 @@ class JDBCOptions(
   val prepareQuery = parameters.get(JDBC_PREPARE_QUERY).map(_ + " ").getOrElse("")
 
   // Infers timestamp values as TimestampNTZ type when reading data.
-  val inferTimestampNTZType =
+  val preferTimestampNTZ =
     parameters
-      .get(JDBC_INFER_TIMESTAMP_NTZ)
+      .get(JDBC_PREFER_TIMESTAMP_NTZ)
       .map(_.toBoolean)
       .getOrElse(SQLConf.get.timestampType == TimestampNTZType)
 }
@@ -301,5 +301,5 @@ object JDBCOptions {
   val JDBC_REFRESH_KRB5_CONFIG = newOption("refreshKrb5Config")
   val JDBC_CONNECTION_PROVIDER = newOption("connectionProvider")
   val JDBC_PREPARE_QUERY = newOption("prepareQuery")
-  val JDBC_INFER_TIMESTAMP_NTZ = newOption("inferTimestampNTZType")
+  val JDBC_PREFER_TIMESTAMP_NTZ = newOption("preferTimestampNTZ")
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
index e23fe05a8a4..b90abd014ea 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
@@ -68,7 +68,7 @@ object JDBCRDD extends Logging {
         val rs = statement.executeQuery()
         try {
           JdbcUtils.getSchema(rs, dialect, alwaysNullable = true,
-            isTimestampNTZ = options.inferTimestampNTZType)
+            isTimestampNTZ = options.preferTimestampNTZ)
         } finally {
           rs.close()
         }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 4b0d461e237..6b3e355793d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -251,7 +251,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
       try {
         statement.setQueryTimeout(options.queryTimeout)
         Some(getSchema(statement.executeQuery(), dialect,
-          isTimestampNTZ = options.inferTimestampNTZType))
+          isTimestampNTZ = options.preferTimestampNTZ))
       } catch {
         case _: SQLException => None
       } finally {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 975c2886948..aa66fcd5304 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1951,7 +1951,7 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
       } else {
         TimestampType
       }
-      val res = readDf.option("inferTimestampNTZType", inferTimestampNTZ).load()
+      val res = readDf.option("preferTimestampNTZ", inferTimestampNTZ).load()
       checkAnswer(res, Seq(Row(null)))
       assert(res.schema.fields.head.dataType == tsType)
       withSQLConf(SQLConf.TIMESTAMP_TYPE.key -> timestampType) {
@@ -1984,7 +1984,7 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
           DateTimeTestUtils.withDefaultTimeZone(zoneId) {
             // Infer TimestmapNTZ column with data source option
             val res = spark.read.format("jdbc")
-              .option("inferTimestampNTZType", "true")
+              .option("preferTimestampNTZ", "true")
               .option("url", urlWithUserAndPass)
               .option("dbtable", tableName)
               .load()


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org