[SPARK-36169][SQL] Make 'spark.sql.sources.disabledJdbcConnProviderList' a static conf (as documented)

### What changes were proposed in this pull request?

This PR proposes to move `spark.sql.sources.disabledJdbcConnProviderList` from `SQLConf` to `StaticSQLConf`, which disallows setting it at runtime.

### Why are the changes needed?

It is documented as a static configuration, so we should make it a proper static configuration.

### Does this PR introduce _any_ user-facing change?

Previously, the configuration could be set to a different value at runtime, but the new value never took effect.
Now an exception is thrown if users try to set it at runtime.
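
For illustration, a minimal sketch of the new behavior, assuming an active `SparkSession` named `spark` (the exception text paraphrases Spark's standard static-config error):

```scala
// Hypothetical REPL snippet; `spark` is an already-running SparkSession.
// Before this change the call silently succeeded but the value was ignored;
// now Spark rejects it with an AnalysisException along the lines of
// "Cannot modify the value of a static config: spark.sql.sources.disabledJdbcConnProviderList".
spark.conf.set("spark.sql.sources.disabledJdbcConnProviderList", "db2")
```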

### How was this patch tested?

An existing unit test was fixed accordingly; it verifies the change.

Closes #33381 from HyukjinKwon/SPARK-36169.

Authored-by: Hyukjin Kwon <gurwls223@apache.org>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
Hyukjin Kwon, 2021-07-16 11:43:22 +09:00
parent f66153de78, commit fba61ad68b
3 changed files with 25 additions and 19 deletions

`sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala`

```diff
@@ -3272,15 +3272,6 @@ object SQLConf {
       .booleanConf
       .createWithDefault(false)
 
-  val DISABLED_JDBC_CONN_PROVIDER_LIST =
-    buildConf("spark.sql.sources.disabledJdbcConnProviderList")
-      .internal()
-      .doc("Configures a list of JDBC connection providers, which are disabled. " +
-        "The list contains the name of the JDBC connection providers separated by comma.")
-      .version("3.1.0")
-      .stringConf
-      .createWithDefault("")
-
   val LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT =
     buildConf("spark.sql.legacy.createHiveTableByDefault")
       .internal()
@@ -4043,7 +4034,8 @@ class SQLConf extends Serializable with Logging {
 
   def legacyPathOptionBehavior: Boolean = getConf(SQLConf.LEGACY_PATH_OPTION_BEHAVIOR)
 
-  def disabledJdbcConnectionProviders: String = getConf(SQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST)
+  def disabledJdbcConnectionProviders: String = getConf(
+    StaticSQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST)
 
   def charVarcharAsString: Boolean = getConf(SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING)
```

`sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala`

```diff
@@ -262,4 +262,12 @@ object StaticSQLConf {
       .stringConf
       .toSequence
       .createWithDefault(Nil)
+
+  val DISABLED_JDBC_CONN_PROVIDER_LIST =
+    buildStaticConf("spark.sql.sources.disabledJdbcConnProviderList")
+      .doc("Configures a list of JDBC connection providers, which are disabled. " +
+        "The list contains the name of the JDBC connection providers separated by comma.")
+      .version("3.1.0")
+      .stringConf
+      .createWithDefault("")
 }
```
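
Because the conf is now static, it must be supplied before the `SparkSession` is created. A minimal sketch of setting it the supported way (builder-based here; passing `--conf` to `spark-submit` works equally):

```scala
import org.apache.spark.sql.SparkSession

// Static SQL confs are read once at session startup; they cannot be
// modified on a live session, so set them when building the session.
val spark = SparkSession.builder()
  .master("local[*]")
  .config("spark.sql.sources.disabledJdbcConnProviderList", "db2")
  .getOrCreate()
```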

`sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala`

```diff
@@ -19,7 +19,8 @@ package org.apache.spark.sql.execution.datasources.jdbc.connection
 
 import javax.security.auth.login.Configuration
 
-import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.internal.StaticSQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 
 class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSparkSession {
@@ -37,14 +38,6 @@ class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSpa
     assert(providers.size === 6)
   }
 
-  test("Disabled provider must not be loaded") {
-    withSQLConf(SQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST.key -> "db2") {
-      val providers = ConnectionProvider.loadProviders()
-      assert(!providers.exists(_.isInstanceOf[DB2ConnectionProvider]))
-      assert(providers.size === 5)
-    }
-  }
-
   test("Multiple security configs must be reachable") {
     Configuration.setConfiguration(null)
     val postgresProvider = new PostgresConnectionProvider()
@@ -77,3 +70,16 @@ class ConnectionProviderSuite extends ConnectionProviderSuiteBase with SharedSpa
     Configuration.setConfiguration(null)
   }
 }
+
+class DisallowedConnectionProviderSuite extends SharedSparkSession {
+  override protected def sparkConf: SparkConf =
+    super.sparkConf.set(
+      StaticSQLConf.DISABLED_JDBC_CONN_PROVIDER_LIST.key, "db2")
+
+  test("Disabled provider must not be loaded") {
+    val providers = ConnectionProvider.loadProviders()
+    assert(!providers.exists(_.isInstanceOf[DB2ConnectionProvider]))
+    assert(providers.size === 5)
+  }
+}
```
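
Note the design choice in the test: because a static conf can no longer be toggled with `withSQLConf` on a running session, the disabled-provider check moves into its own `DisallowedConnectionProviderSuite`, which overrides `sparkConf` so the conf is already in place when `SharedSparkSession` starts the session.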