[SPARK-28642][SQL] Hide credentials in SHOW CREATE TABLE

## What changes were proposed in this pull request?

[SPARK-17783](https://issues.apache.org/jira/browse/SPARK-17783) hid credentials in `CREATE` and `DESC FORMATTED/EXTENDED` output for persistent and temporary JDBC tables, but `SHOW CREATE TABLE` still exposed them:
```sql
spark-sql> show create table mysql_federated_sample;
CREATE TABLE `mysql_federated_sample` (`TBL_ID` BIGINT, `CREATE_TIME` INT, `DB_ID` BIGINT, `LAST_ACCESS_TIME` INT, `OWNER` STRING, `RETENTION` INT, `SD_ID` BIGINT, `TBL_NAME` STRING, `TBL_TYPE` STRING, `VIEW_EXPANDED_TEXT` STRING, `VIEW_ORIGINAL_TEXT` STRING, `IS_REWRITE_ENABLED` BOOLEAN)
USING org.apache.spark.sql.jdbc
OPTIONS (
  `url` 'jdbc:mysql://localhost/hive?user=root&password=mypasswd',
  `driver` 'com.mysql.jdbc.Driver',
  `dbtable` 'TBLS'
)
```

This PR fixes the issue by redacting the data source options (via `SQLConf.get.redactOptions`) before `SHOW CREATE TABLE` renders them.
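
For context, `redactOptions` hides any option whose key or value matches `spark.sql.redaction.options.regex` (default `(?i)url`). The snippet below is only a minimal sketch of that behavior, not Spark's actual implementation:

```scala
// Minimal sketch (not Spark's exact code): any option whose key or value matches
// spark.sql.redaction.options.regex has its value replaced with the same placeholder
// that appears in the SHOW CREATE TABLE output below.
import scala.util.matching.Regex

val redactionPattern: Regex = "(?i)url".r   // default of spark.sql.redaction.options.regex
val replacementText = "*********(redacted)"

def redactOptions(options: Map[String, String]): Map[String, String] =
  options.map { case (key, value) =>
    val sensitive = redactionPattern.findFirstIn(key).isDefined ||
      redactionPattern.findFirstIn(value).isDefined
    key -> (if (sensitive) replacementText else value)
  }

// The JDBC url (which embeds user/password) is hidden; the other options pass through.
redactOptions(Map(
  "url" -> "jdbc:mysql://localhost/hive?user=root&password=mypasswd",
  "driver" -> "com.mysql.jdbc.Driver",
  "dbtable" -> "TBLS"))
// Map(url -> *********(redacted), driver -> com.mysql.jdbc.Driver, dbtable -> TBLS)
```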

## How was this patch tested?

Unit tests and manual tests:
```sql
spark-sql> show create table  mysql_federated_sample;
CREATE TABLE `mysql_federated_sample` (`TBL_ID` BIGINT, `CREATE_TIME` INT, `DB_ID` BIGINT, `LAST_ACCESS_TIME` INT, `OWNER` STRING, `RETENTION` INT, `SD_ID` BIGINT, `TBL_NAME` STRING, `TBL_TYPE` STRING, `VIEW_EXPANDED_TEXT` STRING, `VIEW_ORIGINAL_TEXT` STRING, `IS_REWRITE_ENABLED` BOOLEAN)
USING org.apache.spark.sql.jdbc
OPTIONS (
  `url` '*********(redacted)',
  `driver` 'com.mysql.jdbc.Driver',
  `dbtable` 'TBLS'
)
```
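
As a side note (hypothetical usage, not part of this PR): since the redaction is driven by `spark.sql.redaction.options.regex`, the pattern can be widened if plain `user`/`password` options should be hidden as well:

```scala
// Hypothetical spark-shell snippet: widen the redaction pattern so that user/password
// options are hidden in addition to the JDBC url.
spark.conf.set("spark.sql.redaction.options.regex", "(?i)url|user|password")
spark.sql("SHOW CREATE TABLE mysql_federated_sample").show(truncate = false)
```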

Closes #25375 from wangyum/SPARK-28642.

Authored-by: Yuming Wang <yumwang@ebay.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>

```diff
@@ -1099,7 +1099,7 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableCommand
   private def showDataSourceTableOptions(metadata: CatalogTable, builder: StringBuilder): Unit = {
     builder ++= s"USING ${metadata.provider.get}\n"
 
-    val dataSourceOptions = metadata.storage.properties.map {
+    val dataSourceOptions = SQLConf.get.redactOptions(metadata.storage.properties).map {
       case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'"
     }
 
```


```diff
@@ -26,10 +26,11 @@ import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeTestUtils}
 import org.apache.spark.sql.execution.DataSourceScanExec
-import org.apache.spark.sql.execution.command.ExplainCommand
+import org.apache.spark.sql.execution.command.{ExplainCommand, ShowCreateTableCommand}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JDBCPartition, JDBCRDD, JDBCRelation, JdbcUtils}
 import org.apache.spark.sql.execution.metric.InputOutputMetricsHelper
@@ -1029,6 +1030,32 @@ class JDBCSuite extends QueryTest
     }
   }
 
+  test("Hide credentials in show create table") {
+    val password = "testPass"
+    val tableName = "tab1"
+    withTable(tableName) {
+      sql(
+        s"""
+           |CREATE TABLE $tableName
+           |USING org.apache.spark.sql.jdbc
+           |OPTIONS (
+           | url '$urlWithUserAndPass',
+           | dbtable 'TEST.PEOPLE',
+           | user 'testUser',
+           | password '$password')
+         """.stripMargin)
+
+      val show = ShowCreateTableCommand(TableIdentifier(tableName))
+      spark.sessionState.executePlan(show).executedPlan.executeCollect().foreach { r =>
+        assert(!r.toString.contains(password))
+      }
+
+      sql(s"SHOW CREATE TABLE $tableName").collect().foreach { r =>
+        assert(!r.toString().contains(password))
+      }
+    }
+  }
+
   test("SPARK 12941: The data type mapping for StringType to Oracle") {
     val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
     assert(oracleDialect.getJDBCType(StringType).
```