diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md
index 2cfe8d6147..a5f4a4d947 100644
--- a/docs/sql-migration-guide.md
+++ b/docs/sql-migration-guide.md
@@ -59,7 +59,7 @@ license: |
 
   - In Spark 3.0, you can use `ADD FILE` to add file directories as well. Earlier you could add only single files using this command. To restore the behavior of earlier versions, set `spark.sql.legacy.addSingleFileInAddFile` to `true`.
 
-  - In Spark 3.0, `SHOW TBLPROPERTIES` throws `AnalysisException` if the table does not exist. In Spark version 2.4 and below, this scenario caused `NoSuchTableException`. Also, `SHOW TBLPROPERTIES` on a temporary view causes `AnalysisException`. In Spark version 2.4 and below, it returned an empty result.
+  - In Spark 3.0, `SHOW TBLPROPERTIES` throws `AnalysisException` if the table does not exist. In Spark version 2.4 and below, this scenario caused `NoSuchTableException`.
 
   - In Spark 3.0, `SHOW CREATE TABLE` always returns Spark DDL, even when the given table is a Hive SerDe table. For generating Hive DDL, use `SHOW CREATE TABLE AS SERDE` command instead.
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 146df97d48..97750f467a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3573,7 +3573,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   override def visitShowTblProperties(
       ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
     ShowTableProperties(
-      UnresolvedTable(visitMultipartIdentifier(ctx.table)),
+      UnresolvedTableOrView(visitMultipartIdentifier(ctx.table)),
       Option(ctx.key).map(visitTablePropertyKey))
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 572ea840eb..9c31f07f29 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2010,11 +2010,11 @@ class DDLParserSuite extends AnalysisTest {
   test("SHOW TBLPROPERTIES table") {
     comparePlans(
       parsePlan("SHOW TBLPROPERTIES a.b.c"),
-      ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), None))
+      ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), None))
 
     comparePlans(
       parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"),
-      ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), Some("propKey1")))
+      ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), Some("propKey1")))
   }
 
   test("DESCRIBE FUNCTION") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index f74d768967..58a7251f4e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -562,6 +562,9 @@ class ResolveSessionCatalog(
     case ShowTableProperties(r: ResolvedTable, propertyKey) if isSessionCatalog(r.catalog) =>
      ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
 
+    case ShowTableProperties(r: ResolvedView, propertyKey) =>
+      ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
+
     case DescribeFunctionStatement(nameParts, extended) =>
       val functionIdent =
         parseSessionCatalogFunctionIdentifier(nameParts, "DESCRIBE FUNCTION")
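Taken together, the parser change (`UnresolvedTableOrView`) and the resolution rule above route `SHOW TBLPROPERTIES` on a permanent view to the same `ShowTablePropertiesCommand` used for tables. A minimal sketch of the user-visible effect from a spark-shell session (illustrative only; `spark` is the usual SparkSession and `v` a throwaway name, neither introduced by this patch):

    // Before this change a permanent view failed analysis here;
    // now its properties are listed like a table's.
    spark.sql("CREATE VIEW v TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1")
    spark.sql("SHOW TBLPROPERTIES v").show(truncate = false)  // 'p1' plus internal view.* entries
    spark.sql("SHOW TBLPROPERTIES v('p1')").collect()         // Array(Row("v1"))
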
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 2c9ca36c7f..fc8cc11bb1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -918,15 +918,20 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio
   }
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table)
-    propertyKey match {
-      case Some(p) =>
-        val propValue = catalogTable
-          .properties
-          .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
-        Seq(Row(propValue))
-      case None =>
-        catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
+    val catalog = sparkSession.sessionState.catalog
+    if (catalog.isTemporaryTable(table)) {
+      Seq.empty[Row]
+    } else {
+      val catalogTable = catalog.getTableMetadata(table)
+      propertyKey match {
+        case Some(p) =>
+          val propValue = catalogTable
+            .properties
+            .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
+          Seq(Row(propValue))
+        case None =>
+          catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
+      }
     }
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tblproperties.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tblproperties.sql
new file mode 100644
index 0000000000..2861b2b43a
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tblproperties.sql
@@ -0,0 +1,26 @@
+-- create a table with properties
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+TBLPROPERTIES('p1'='v1', 'p2'='v2');
+
+SHOW TBLPROPERTIES tbl;
+SHOW TBLPROPERTIES tbl("p1");
+SHOW TBLPROPERTIES tbl("p3");
+
+DROP TABLE tbl;
+
+-- create a view with properties
+CREATE VIEW view TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1;
+
+SHOW TBLPROPERTIES view;
+SHOW TBLPROPERTIES view("p1");
+SHOW TBLPROPERTIES view("p3");
+
+DROP VIEW view;
+
+-- create a temporary view with properties
+CREATE TEMPORARY VIEW tv TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1;
+
+-- Properties for a temporary view should be empty
+SHOW TBLPROPERTIES tv;
+
+DROP VIEW tv;
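The `isTemporaryTable` guard in `run` above restores the Spark 2.4 behavior for temporary views: an empty result instead of an exception. A quick way to exercise the new path from Scala (a hypothetical session; `spark` and `tv` are illustrative names, not part of the patch):

    // A temporary view carries no catalog-backed properties, so the
    // command now yields zero rows rather than failing analysis.
    spark.sql("CREATE TEMPORARY VIEW tv TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1")
    val rows = spark.sql("SHOW TBLPROPERTIES tv").collect()
    assert(rows.isEmpty)
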
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out
new file mode 100644
index 0000000000..6984b34c36
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out
@@ -0,0 +1,114 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 13
+
+
+-- !query
+CREATE TABLE tbl (a INT, b STRING, c INT) USING parquet
+TBLPROPERTIES('p1'='v1', 'p2'='v2')
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SHOW TBLPROPERTIES tbl
+-- !query schema
+struct<key:string,value:string>
+-- !query output
+p1	v1
+p2	v2
+
+
+-- !query
+SHOW TBLPROPERTIES tbl("p1")
+-- !query schema
+struct<value:string>
+-- !query output
+v1
+
+
+-- !query
+SHOW TBLPROPERTIES tbl("p3")
+-- !query schema
+struct<value:string>
+-- !query output
+Table default.tbl does not have property: p3
+
+
+-- !query
+DROP TABLE tbl
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE VIEW view TBLPROPERTIES('p1'='v1', 'p2'='v2') AS SELECT 1 AS c1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SHOW TBLPROPERTIES view
+-- !query schema
+struct<key:string,value:string>
+-- !query output
+p1	v1
+p2	v2
+view.catalogAndNamespace.numParts	2
+view.catalogAndNamespace.part.0	spark_catalog
+view.catalogAndNamespace.part.1	default
+view.query.out.col.0	c1
+view.query.out.numCols	1
+
+
+-- !query
+SHOW TBLPROPERTIES view("p1")
+-- !query schema
+struct<value:string>
+-- !query output
+v1
+
+
+-- !query
+SHOW TBLPROPERTIES view("p3")
+-- !query schema
+struct<value:string>
+-- !query output
+Table default.view does not have property: p3
+
+
+-- !query
+DROP VIEW view
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TEMPORARY VIEW tv TBLPROPERTIES('p1'='v1') AS SELECT 1 AS c1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SHOW TBLPROPERTIES tv
+-- !query schema
+struct<key:string,value:string>
+-- !query output
+
+
+
+-- !query
+DROP VIEW tv
+-- !query schema
+struct<>
+-- !query output
+
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index bd85ad4e49..dcec8bf5c0 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -179,7 +179,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     val message = intercept[AnalysisException] {
       sql("SHOW TBLPROPERTIES badtable")
     }.getMessage
-    assert(message.contains("Table not found: badtable"))
+    assert(message.contains("Table or view not found: badtable"))
 
     // When key is not found, a row containing the error is returned.
     checkAnswer(
@@ -193,21 +193,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('`prop2Key`')"), Row("prop2Val"))
   }
 
-  test("show tblproperties for spark temporary table - AnalysisException is thrown") {
-    withTempView("parquet_temp") {
-      sql(
-        """
-          |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
-          |USING org.apache.spark.sql.parquet.DefaultSource
-        """.stripMargin)
-
-      val message = intercept[AnalysisException] {
-        sql("SHOW TBLPROPERTIES parquet_temp")
-      }.getMessage
-      assert(message.contains("parquet_temp is a temp view not table"))
-    }
-  }
-
   Seq(true, false).foreach { local =>
     val loadQuery = if (local) "LOAD DATA LOCAL" else "LOAD DATA"
     test(loadQuery) {
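One consequence visible in the Hive suite above: because the parser now emits `UnresolvedTableOrView`, the not-found error reads "Table or view not found". A self-contained sketch of observing this outside the test suite (illustrative; `spark` is the usual SparkSession and `badtable` intentionally does not exist):

    import org.apache.spark.sql.AnalysisException

    try {
      spark.sql("SHOW TBLPROPERTIES badtable")
    } catch {
      case e: AnalysisException =>
        // The message now covers both kinds of relation.
        assert(e.getMessage.contains("Table or view not found: badtable"))
    }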