Posted to commits@spark.apache.org by we...@apache.org on 2020/02/04 13:27:04 UTC

[spark] branch branch-3.0 updated: [SPARK-30725][SQL] Make legacy SQL configs as internal configs

This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 0d19842  [SPARK-30725][SQL] Make legacy SQL configs as internal configs
0d19842 is described below

commit 0d1984269fc8f81de384529acca2b3f8584d2f1a
Author: Maxim Gekk <ma...@gmail.com>
AuthorDate: Tue Feb 4 21:17:05 2020 +0800

    [SPARK-30725][SQL] Make legacy SQL configs as internal configs
    
    ### What changes were proposed in this pull request?
    All legacy SQL configs are marked as internal. In particular, the following configs are updated to be internal (the resulting builder pattern is sketched after the list):
    - spark.sql.legacy.sizeOfNull
    - spark.sql.legacy.replaceDatabricksSparkAvro.enabled
    - spark.sql.legacy.typeCoercion.datetimeToString.enabled
    - spark.sql.legacy.looseUpcast
    - spark.sql.legacy.arrayExistsFollowsThreeValuedLogic
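    
    The change is mechanical: each entry gains an .internal() call in its
    builder chain. For example, the LEGACY_LOOSE_UPCAST entry in SQLConf
    reads as follows after the patch (reassembled from the diff below):
    
        val LEGACY_LOOSE_UPCAST = buildConf("spark.sql.legacy.looseUpcast")
          .internal()  // omitted from user-facing listings such as SET -v
          .doc("When true, the upcast will be loose and allows string to atomic types.")
          .booleanConf
          .createWithDefault(false)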
    
    ### Why are the changes needed?
    In the general case, users shouldn't change legacy configs, so they can be marked as internal.
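    
    Concretely, an internal config is omitted from user-facing listings such
    as the output of SET -v (which shows only public configs), but it remains
    settable by key for users who need the legacy behavior. A minimal sketch
    of the observable effect, assuming a running SparkSession named spark:
    
        // The legacy entry no longer appears in the public config listing:
        spark.sql("SET -v").show()
        
        // ...but it can still be set explicitly by its key:
        spark.conf.set("spark.sql.legacy.sizeOfNull", "true")
        // Per the config's doc string, size of null returns -1 while the flag is on:
        spark.sql("SELECT size(CAST(NULL AS ARRAY<INT>))").show()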
    
    ### Does this PR introduce any user-facing change?
    No
    
    ### How was this patch tested?
    Should be tested by the Jenkins build and by running the existing tests.
    
    Closes #27448 from MaxGekk/legacy-internal-sql-conf.
    
    Authored-by: Maxim Gekk <ma...@gmail.com>
    Signed-off-by: Wenchen Fan <we...@databricks.com>
    (cherry picked from commit f2dd082544aeba5978d0c140d0194eedb969d132)
    Signed-off-by: Wenchen Fan <we...@databricks.com>
---
 .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala   | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 3ad3416..b94ddbd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1916,6 +1916,7 @@ object SQLConf {
     .createWithDefault(Deflater.DEFAULT_COMPRESSION)
 
   val LEGACY_SIZE_OF_NULL = buildConf("spark.sql.legacy.sizeOfNull")
+    .internal()
     .doc("If it is set to true, size of null returns -1. This behavior was inherited from Hive. " +
       "The size function returns null for null input if the flag is disabled.")
     .booleanConf
@@ -1923,6 +1924,7 @@ object SQLConf {
 
   val LEGACY_REPLACE_DATABRICKS_SPARK_AVRO_ENABLED =
     buildConf("spark.sql.legacy.replaceDatabricksSparkAvro.enabled")
+      .internal()
       .doc("If it is set to true, the data source provider com.databricks.spark.avro is mapped " +
         "to the built-in but external Avro data source module for backward compatibility.")
       .booleanConf
@@ -2048,10 +2050,11 @@ object SQLConf {
 
   val LEGACY_CAST_DATETIME_TO_STRING =
     buildConf("spark.sql.legacy.typeCoercion.datetimeToString.enabled")
+      .internal()
       .doc("If it is set to true, date/timestamp will cast to string in binary comparisons " +
         "with String")
-    .booleanConf
-    .createWithDefault(false)
+      .booleanConf
+      .createWithDefault(false)
 
   val DEFAULT_CATALOG = buildConf("spark.sql.defaultCatalog")
     .doc("Name of the default catalog. This will be the current catalog if users have not " +
@@ -2071,6 +2074,7 @@ object SQLConf {
       .createOptional
 
   val LEGACY_LOOSE_UPCAST = buildConf("spark.sql.legacy.looseUpcast")
+    .internal()
     .doc("When true, the upcast will be loose and allows string to atomic types.")
     .booleanConf
     .createWithDefault(false)
@@ -2083,6 +2087,7 @@ object SQLConf {
 
   val LEGACY_ARRAY_EXISTS_FOLLOWS_THREE_VALUED_LOGIC =
     buildConf("spark.sql.legacy.arrayExistsFollowsThreeValuedLogic")
+      .internal()
       .doc("When true, the ArrayExists will follow the three-valued boolean logic.")
       .booleanConf
       .createWithDefault(true)

