[SPARK-30282][SQL][FOLLOWUP] SHOW TBLPROPERTIES should support views

### What changes were proposed in this pull request?

This PR addresses two things:
- `SHOW TBLPROPERTIES` should support views (a regression introduced by #26921)
- `SHOW TBLPROPERTIES` on a temporary view should return an empty result (the Spark 2.4 behavior) instead of throwing `AnalysisException`.

### Why are the changes needed?

It's a bug.

### Does this PR introduce any user-facing change?

Yes, now `SHOW TBLPROPERTIES` works on views:
```
scala> sql("CREATE VIEW view TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1")
scala> sql("SHOW TBLPROPERTIES view").show(truncate=false)
+---------------------------------+-------------+
|key                              |value        |
+---------------------------------+-------------+
|view.catalogAndNamespace.numParts|2            |
|view.query.out.col.0             |c1           |
|view.query.out.numCols           |1            |
|p2                               |v2           |
|view.catalogAndNamespace.part.0  |spark_catalog|
|p1                               |v1           |
|view.catalogAndNamespace.part.1  |default      |
+---------------------------------+-------------+
```
And for a temporary view:
```
scala> sql("CREATE TEMPORARY VIEW tview TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1")
scala> sql("SHOW TBLPROPERTIES tview").show(truncate=false)
+---+-----+
|key|value|
+---+-----+
+---+-----+
```

### How was this patch tested?

Added tests.

Closes #28375 from imback82/show_tblproperties_followup.

Authored-by: Terry Kim <yuminkim@gmail.com>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
This commit is contained in:
Terry Kim 2020-04-29 07:06:45 +00:00 committed by Wenchen Fan
parent ea525fe8c0
commit 36803031e8
8 changed files with 162 additions and 29 deletions

View file

@ -59,7 +59,7 @@ license: |
- In Spark 3.0, you can use `ADD FILE` to add file directories as well. Earlier you could add only single files using this command. To restore the behavior of earlier versions, set `spark.sql.legacy.addSingleFileInAddFile` to `true`.
- In Spark 3.0, `SHOW TBLPROPERTIES` throws `AnalysisException` if the table does not exist. In Spark version 2.4 and below, this scenario caused `NoSuchTableException`. Also, `SHOW TBLPROPERTIES` on a temporary view causes `AnalysisException`. In Spark version 2.4 and below, it returned an empty result.
- In Spark 3.0, `SHOW TBLPROPERTIES` throws `AnalysisException` if the table does not exist. In Spark version 2.4 and below, this scenario caused `NoSuchTableException`.
- In Spark 3.0, `SHOW CREATE TABLE` always returns Spark DDL, even when the given table is a Hive SerDe table. For generating Hive DDL, use `SHOW CREATE TABLE AS SERDE` command instead.

View file

@ -3573,7 +3573,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
override def visitShowTblProperties(
ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
ShowTableProperties(
UnresolvedTable(visitMultipartIdentifier(ctx.table)),
UnresolvedTableOrView(visitMultipartIdentifier(ctx.table)),
Option(ctx.key).map(visitTablePropertyKey))
}

View file

@ -2010,11 +2010,11 @@ class DDLParserSuite extends AnalysisTest {
test("SHOW TBLPROPERTIES table") {
comparePlans(
parsePlan("SHOW TBLPROPERTIES a.b.c"),
ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), None))
ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), None))
comparePlans(
parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"),
ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), Some("propKey1")))
ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), Some("propKey1")))
}
test("DESCRIBE FUNCTION") {

View file

@ -562,6 +562,9 @@ class ResolveSessionCatalog(
case ShowTableProperties(r: ResolvedTable, propertyKey) if isSessionCatalog(r.catalog) =>
ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
case ShowTableProperties(r: ResolvedView, propertyKey) =>
ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
case DescribeFunctionStatement(nameParts, extended) =>
val functionIdent =
parseSessionCatalogFunctionIdentifier(nameParts, "DESCRIBE FUNCTION")

View file

@ -918,15 +918,20 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio
}
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table)
propertyKey match {
case Some(p) =>
val propValue = catalogTable
.properties
.getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
Seq(Row(propValue))
case None =>
catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
val catalog = sparkSession.sessionState.catalog
if (catalog.isTemporaryTable(table)) {
Seq.empty[Row]
} else {
val catalogTable = catalog.getTableMetadata(table)
propertyKey match {
case Some(p) =>
val propValue = catalogTable
.properties
.getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
Seq(Row(propValue))
case None =>
catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
}
}
}
}

View file

@ -0,0 +1,26 @@
-- Test input for SQLQueryTestSuite; expected results live in the generated
-- golden file (show-tblproperties.sql.out). Lines starting with "--" are
-- comments and are not executed as queries.

-- create a table with properties
CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
TBLPROPERTIES('p1'='v1', 'p2'='v2');
-- listing all properties, fetching one existing key, and fetching a missing
-- key (the missing key returns a "does not have property" message row)
SHOW TBLPROPERTIES tbl;
SHOW TBLPROPERTIES tbl("p1");
SHOW TBLPROPERTIES tbl("p3");
DROP TABLE tbl;
-- create a view with properties
-- (the output also contains internal view.* properties Spark stores on views)
CREATE VIEW view TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1;
SHOW TBLPROPERTIES view;
SHOW TBLPROPERTIES view("p1");
SHOW TBLPROPERTIES view("p3");
DROP VIEW view;
-- create a temporary view with properties
CREATE TEMPORARY VIEW tv TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1;
-- Properties for a temporary view should be empty
SHOW TBLPROPERTIES tv;
DROP VIEW tv;

View file

@ -0,0 +1,114 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 13
-- !query
CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
TBLPROPERTIES('p1'='v1', 'p2'='v2')
-- !query schema
struct<>
-- !query output
-- !query
SHOW TBLPROPERTIES tbl
-- !query schema
struct<key:string,value:string>
-- !query output
p1 v1
p2 v2
-- !query
SHOW TBLPROPERTIES tbl("p1")
-- !query schema
struct<value:string>
-- !query output
v1
-- !query
SHOW TBLPROPERTIES tbl("p3")
-- !query schema
struct<value:string>
-- !query output
Table default.tbl does not have property: p3
-- !query
DROP TABLE tbl
-- !query schema
struct<>
-- !query output
-- !query
CREATE VIEW view TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1
-- !query schema
struct<>
-- !query output
-- !query
SHOW TBLPROPERTIES view
-- !query schema
struct<key:string,value:string>
-- !query output
p1 v1
p2 v2
view.catalogAndNamespace.numParts 2
view.catalogAndNamespace.part.0 spark_catalog
view.catalogAndNamespace.part.1 default
view.query.out.col.0 c1
view.query.out.numCols 1
-- !query
SHOW TBLPROPERTIES view("p1")
-- !query schema
struct<value:string>
-- !query output
v1
-- !query
SHOW TBLPROPERTIES view("p3")
-- !query schema
struct<value:string>
-- !query output
Table default.view does not have property: p3
-- !query
DROP VIEW view
-- !query schema
struct<>
-- !query output
-- !query
CREATE TEMPORARY VIEW tv TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1
-- !query schema
struct<>
-- !query output
-- !query
SHOW TBLPROPERTIES tv
-- !query schema
struct<key:string,value:string>
-- !query output
-- !query
DROP VIEW tv
-- !query schema
struct<>
-- !query output

View file

@ -179,7 +179,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
val message = intercept[AnalysisException] {
sql("SHOW TBLPROPERTIES badtable")
}.getMessage
assert(message.contains("Table not found: badtable"))
assert(message.contains("Table or view not found: badtable"))
// When key is not found, a row containing the error is returned.
checkAnswer(
@ -193,21 +193,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('`prop2Key`')"), Row("prop2Val"))
}
test("show tblproperties for spark temporary table - AnalysisException is thrown") {
withTempView("parquet_temp") {
sql(
"""
|CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
|USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
val message = intercept[AnalysisException] {
sql("SHOW TBLPROPERTIES parquet_temp")
}.getMessage
assert(message.contains("parquet_temp is a temp view not table"))
}
}
Seq(true, false).foreach { local =>
val loadQuery = if (local) "LOAD DATA LOCAL" else "LOAD DATA"
test(loadQuery) {