You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2021/07/16 02:43:48 UTC
[spark] branch master updated: [SPARK-36169][SQL] Make
'spark.sql.sources.disabledJdbcConnProviderList' as a static conf (as
documented)
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new fba61ad [SPARK-36169][SQL] Make 'spark.sql.sources.disabledJdbcConnProviderList' as a static conf (as documented)
fba61ad is described below
commit fba61ad68bb12b22055d5d475e95e2681685eed7
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Fri Jul 16 11:43:22 2021 +0900
[SPARK-36169][SQL] Make 'spark.sql.sources.disabledJdbcConnProviderList' as a static conf (as documented)
### What changes were proposed in this pull request?
This PR proposes to move `spark.sql.sources.disabledJdbcConnProviderList` from SQLConf to StaticSQLConf which disallows to set in runtime.
### Why are the changes needed?
It's documented as a static configuration. We should make it a static configuration properly.
### Does this PR introduce _any_ user-facing change?
Previously, the configuration could be set to a different value but was not effective.
Now it throws an exception if users try to set in runtime.
### How was this patch tested?
Existing unit test was fixed. That should verify the change.
Closes #33381 from HyukjinKwon/SPARK-36169.
Authored-by: Hyukjin Kwon <gu...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../org/apache/spark/sql/internal/SQLConf.scala | 12 ++---------
.../apache/spark/sql/internal/StaticSQLConf.scala | 8 ++++++++
.../jdbc/connection/ConnectionProviderSuite.scala | 24 ++++++++++++++--------
3 files changed, 25 insertions(+), 19 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index e9c5f6e..f1bfb14 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -3272,15 +3272,6 @@ object SQLConf {
.booleanConf
.createWithDefault(false)
- val DISABLED_JDBC_CONN_PROVIDER_LIST =
- buildConf("spark.sql.sources.disabledJdbcConnProviderList")
- .internal()
- .doc("Configures a list of JDBC connection providers, which are disabled. " +
- "The list contains the name of the JDBC connection providers separated by comma.")
- .version("3.1.0")
- .stringConf
- .createWithDefault("")
-
val LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT =
buildConf("spark.sql.legacy.createHiveTableByDefault")
.internal()
@@ -4043,7 +4034,8 @@ class SQLConf extends Serializable with Logging {
def legacyPathOptionBehavior: Boolean = getConf(SQLConf.LEGACY_PATH_OPTION_BEHAVIOR)
- def disabledJdbcConnectionProviders: String = getConf(SQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST)
+ def disabledJdbcConnectionProviders: String = getConf(
+ StaticSQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST)
def charVarcharAsString: Boolean = getConf(SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
index bfefca4..3be02f6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
@@ -262,4 +262,12 @@ object StaticSQLConf {
.stringConf
.toSequence
.createWithDefault(Nil)
+
+ val DISABLED_JDBC_CONN_PROVIDER_LIST =
+ buildStaticConf("spark.sql.sources.disabledJdbcConnProviderList")
+ .doc("Configures a list of JDBC connection providers, which are disabled. " +
+ "The list contains the name of the JDBC connection providers separated by comma.")
+ .version("3.1.0")
+ .stringConf
+ .createWithDefault("")
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
index 71b0325..32d8fce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
@@ -19,7 +19,8 @@ package org.apache.spark.sql.execution.datasources.jdbc.connection
import javax.security.auth.login.Configuration
-import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.internal.StaticSQLConf
import org.apache.spark.sql.test.SharedSparkSession
class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSparkSession {
@@ -37,14 +38,6 @@ class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSpa
assert(providers.size === 6)
}
- test("Disabled provider must not be loaded") {
- withSQLConf(SQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST.key -> "db2") {
- val providers = ConnectionProvider.loadProviders()
- assert(!providers.exists(_.isInstanceOf[DB2ConnectionProvider]))
- assert(providers.size === 5)
- }
- }
-
test("Multiple security configs must be reachable") {
Configuration.setConfiguration(null)
val postgresProvider = new PostgresConnectionProvider()
@@ -77,3 +70,16 @@ class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSpa
Configuration.setConfiguration(null)
}
}
+
+class DisallowedConnectionProviderSuite extends SharedSparkSession {
+
+ override protected def sparkConf: SparkConf =
+ super.sparkConf.set(
+ StaticSQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST.key, "db2")
+
+ test("Disabled provider must not be loaded") {
+ val providers = ConnectionProvider.loadProviders()
+ assert(!providers.exists(_.isInstanceOf[DB2ConnectionProvider]))
+ assert(providers.size === 5)
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org