[SPARK-37888][SQL][TESTS] Unify v1 and v2 `DESCRIBE TABLE` tests

### What changes were proposed in this pull request?
1. Move the `DESCRIBE TABLE` parsing tests from `DDLParserSuite` to the new `DescribeTableParserSuite`.
2. Put the common `DESCRIBE TABLE` tests into the single trait `org.apache.spark.sql.execution.command.DescribeTableSuiteBase`, and move the datasource-specific tests into `v1.DescribeTableSuite` and `v2.DescribeTableSuite` (see the sketch below).

The changes follow the approach of #30287.

Closes #36671.
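
To make the new layering concrete, here is a minimal, self-contained sketch of how the suites compose. The `DescribeTableSuiteBase`, `CommandSuiteBase`, and `DDLCommandTestUtils` names are from this PR and the existing command test framework; the stub traits and the `V1…`/`V2…` class names are illustrative stand-ins, not the actual code:
```scala
// Stand-ins for Spark's test infrastructure (QueryTest, DDLCommandTestUtils,
// and the per-catalog CommandSuiteBase traits) -- illustrative only.
trait QueryTest
trait DDLCommandTestUtils { val command: String }
trait CommandSuiteBase

// Tests shared by every catalog implementation live in one trait.
trait DescribeTableSuiteBase extends QueryTest with DDLCommandTestUtils {
  override val command = "DESCRIBE TABLE"
  // common `DESCRIBE TABLE` tests go here
}

// Each catalog flavor mixes the common trait with its own CommandSuiteBase.
// In the real code these live in the v1/v2 (and Hive) command packages,
// and the Hive suite extends the v1 base to reuse the v1-only tests.
class V1DescribeTableSuite extends DescribeTableSuiteBase with CommandSuiteBase
class V2DescribeTableSuite extends DescribeTableSuiteBase with CommandSuiteBase
```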

### Why are the changes needed?
1. The unification makes it possible to run the common `DESCRIBE TABLE` tests for both DSv1 (the In-Memory and Hive external catalogs) and DSv2.
2. We can detect missing features and behavioral differences between the DSv1 and DSv2 implementations. For instance, the unified suites surface that v1 `DESCRIBE` outputs `null` column comments while v2 outputs empty strings.

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
By running the modified test suites:
```
$ build/sbt "testOnly *DDLParserSuite"
$ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *HiveDDLSuite"
```
and new test suites:
```
$ build/sbt "sql/test:testOnly *DescribeTableParserSuite"
$ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *DescribeTableSuite"
```

Closes #36912 from MaxGekk/unify-describe-table-tests-4.

Authored-by: Max Gekk <max.gekk@gmail.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
master
Max Gekk 2022-06-21 10:45:27 +03:00
parent 40998da185
commit 42d33e1336
8 changed files with 414 additions and 135 deletions


@@ -1213,21 +1213,6 @@ class DDLParserSuite extends AnalysisTest {
"The feature is not supported: DESC TABLE COLUMN for a specific partition."))
}
test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
comparePlans(parsePlan("describe t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = false))
comparePlans(parsePlan("describe table t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = false))
comparePlans(parsePlan("describe table extended t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = true))
comparePlans(parsePlan("describe table formatted t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = true))
}
test("insert table: basic append") {
Seq(
"INSERT INTO TABLE testcat.ns1.ns2.tbl SELECT * FROM source",


@@ -36,7 +36,7 @@ import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.sql.internal.SQLConf.{PARTITION_OVERWRITE_MODE, PartitionOverwriteMode, V2_SESSION_CATALOG_IMPLEMENTATION}
import org.apache.spark.sql.internal.connector.SimpleTableProvider
import org.apache.spark.sql.sources.SimpleScanSource
import org.apache.spark.sql.types.{BooleanType, LongType, MetadataBuilder, StringType, StructField, StructType}
import org.apache.spark.sql.types.{LongType, MetadataBuilder, StringType, StructField, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
@@ -89,71 +89,6 @@ class DataSourceV2SQLSuite
checkAnswer(spark.internalCreateDataFrame(rdd, table.schema), Seq.empty)
}
test("DescribeTable using v2 catalog") {
spark.sql("CREATE TABLE testcat.table_name (id bigint, data string)" +
" USING foo" +
" PARTITIONED BY (id)")
val descriptionDf = spark.sql("DESCRIBE TABLE testcat.table_name")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) ===
Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
val description = descriptionDf.collect()
assert(description === Seq(
Row("id", "bigint", ""),
Row("data", "string", ""),
Row("", "", ""),
Row("# Partitioning", "", ""),
Row("Part 0", "id", "")))
val e = intercept[AnalysisException] {
sql("DESCRIBE TABLE testcat.table_name PARTITION (id = 1)")
}
assert(e.message.contains("DESCRIBE does not support partition for v2 tables"))
}
test("DescribeTable with v2 catalog when table does not exist.") {
intercept[AnalysisException] {
spark.sql("DESCRIBE TABLE testcat.table_name")
}
}
test("DescribeTable extended using v2 catalog") {
spark.sql("CREATE TABLE testcat.table_name (id bigint, data string)" +
" USING foo" +
" PARTITIONED BY (id)" +
" TBLPROPERTIES ('bar'='baz', 'password' = 'password')" +
" COMMENT 'this is a test table'" +
" LOCATION 'file:/tmp/testcat/table_name'")
val descriptionDf = spark.sql("DESCRIBE TABLE EXTENDED testcat.table_name")
assert(descriptionDf.schema.map(field => (field.name, field.dataType))
=== Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
assert(descriptionDf.collect()
.map(_.toSeq)
.map(_.toArray.map(_.toString.trim)) === Array(
Array("id", "bigint", ""),
Array("data", "string", ""),
Array("", "", ""),
Array("# Partitioning", "", ""),
Array("Part 0", "id", ""),
Array("", "", ""),
Array("# Metadata Columns", "", ""),
Array("index", "int", "Metadata column used to conflict with a data column"),
Array("_partition", "string", "Partition key used to store the row"),
Array("", "", ""),
Array("# Detailed Table Information", "", ""),
Array("Name", "testcat.table_name", ""),
Array("Comment", "this is a test table", ""),
Array("Location", "file:/tmp/testcat/table_name", ""),
Array("Provider", "foo", ""),
Array(TableCatalog.PROP_OWNER.capitalize, defaultUser, ""),
Array("Table Properties", "[bar=baz,password=*********(redacted)]", "")))
}
test("Describe column for v2 catalog") {
val t = "testcat.tbl"
withTable(t) {
@@ -2410,49 +2345,6 @@ class DataSourceV2SQLSuite
}
}
test("SPARK-34561: drop/add columns to a dataset of `DESCRIBE TABLE`") {
val tbl = s"${catalogAndNamespace}tbl"
withTable(tbl) {
sql(s"CREATE TABLE $tbl (c0 INT) USING $v2Format")
val description = sql(s"DESCRIBE TABLE $tbl")
val noCommentDataset = description.drop("comment")
val expectedSchema = new StructType()
.add(
name = "col_name",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "name of the column").build())
.add(
name = "data_type",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "data type of the column").build())
assert(noCommentDataset.schema === expectedSchema)
val isNullDataset = noCommentDataset
.withColumn("is_null", noCommentDataset("col_name").isNull)
assert(isNullDataset.schema === expectedSchema.add("is_null", BooleanType, false))
}
}
test("SPARK-34576: drop/add columns to a dataset of `DESCRIBE COLUMN`") {
val tbl = s"${catalogAndNamespace}tbl"
withTable(tbl) {
sql(s"CREATE TABLE $tbl (c0 INT) USING $v2Format")
val description = sql(s"DESCRIBE TABLE $tbl c0")
val noCommentDataset = description.drop("info_value")
val expectedSchema = new StructType()
.add(
name = "info_name",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "name of the column info").build())
assert(noCommentDataset.schema === expectedSchema)
val isNullDataset = noCommentDataset
.withColumn("is_null", noCommentDataset("info_name").isNull)
assert(isNullDataset.schema === expectedSchema.add("is_null", BooleanType, false))
}
}
test("SPARK-34923: do not propagate metadata columns through Project") {
val t1 = s"${catalogAndNamespace}table"
withTable(t1) {


@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTableOrView}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.plans.logical.DescribeRelation
class DescribeTableParserSuite extends AnalysisTest {
test("SPARK-17328: Fix NPE with EXPLAIN DESCRIBE TABLE") {
comparePlans(parsePlan("describe t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = false))
comparePlans(parsePlan("describe table t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = false))
comparePlans(parsePlan("describe table extended t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = true))
comparePlans(parsePlan("describe table formatted t"),
DescribeRelation(
UnresolvedTableOrView(Seq("t"), "DESCRIBE TABLE", true), Map.empty, isExtended = true))
}
}


@@ -0,0 +1,87 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.types.{BooleanType, MetadataBuilder, StringType, StructType}
/**
* This base suite contains unified tests for the `DESCRIBE TABLE` command that check V1 and V2
* table catalogs. The tests that cannot run for all supported catalogs are located in more
* specific test suites:
*
* - V2 table catalog tests: `org.apache.spark.sql.execution.command.v2.DescribeTableSuite`
* - V1 table catalog tests:
* `org.apache.spark.sql.execution.command.v1.DescribeTableSuiteBase`
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.DescribeTableSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.DescribeTableSuite`
*/
trait DescribeTableSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "DESCRIBE TABLE"
test("DESCRIBE TABLE in a catalog when table does not exist") {
withNamespaceAndTable("ns", "table") { tbl =>
val e = intercept[AnalysisException] {
sql(s"DESCRIBE TABLE ${tbl}_non_existence")
}
assert(e.getMessage.contains(s"Table or view not found: ${tbl}_non_existence"))
}
}
test("SPARK-34561: drop/add columns to a dataset of `DESCRIBE TABLE`") {
withNamespaceAndTable("ns", "table") { tbl =>
sql(s"CREATE TABLE $tbl (c0 INT) $defaultUsing")
val description = sql(s"DESCRIBE TABLE $tbl")
val noCommentDataset = description.drop("comment")
val expectedSchema = new StructType()
.add(
name = "col_name",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "name of the column").build())
.add(
name = "data_type",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "data type of the column").build())
assert(noCommentDataset.schema === expectedSchema)
val isNullDataset = noCommentDataset
.withColumn("is_null", noCommentDataset("col_name").isNull)
assert(isNullDataset.schema === expectedSchema.add("is_null", BooleanType, false))
}
}
test("SPARK-34576: drop/add columns to a dataset of `DESCRIBE COLUMN`") {
withNamespaceAndTable("ns", "table") { tbl =>
sql(s"CREATE TABLE $tbl (c0 INT) $defaultUsing")
val description = sql(s"DESCRIBE TABLE $tbl c0")
val noCommentDataset = description.drop("info_value")
val expectedSchema = new StructType()
.add(
name = "info_name",
dataType = StringType,
nullable = false,
metadata = new MetadataBuilder().putString("comment", "name of the column info").build())
assert(noCommentDataset.schema === expectedSchema)
val isNullDataset = noCommentDataset
.withColumn("is_null", noCommentDataset("info_name").isNull)
assert(isNullDataset.schema === expectedSchema.add("is_null", BooleanType, false))
}
}
}


@@ -0,0 +1,106 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.types.StringType
/**
* This base suite contains unified tests for the `DESCRIBE TABLE` command that check V1
* table catalogs. The tests that cannot run for all V1 catalogs are located in more
* specific test suites:
*
* - V1 In-Memory catalog: `org.apache.spark.sql.execution.command.v1.DescribeTableSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.DescribeTableSuite`
*/
trait DescribeTableSuiteBase extends command.DescribeTableSuiteBase
with command.TestsV1AndV2Commands {
test("DESCRIBE TABLE with non-'partitioned-by' clause") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing")
val descriptionDf = spark.sql(s"DESCRIBE TABLE $tbl")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) ===
Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
QueryTest.checkAnswer(
descriptionDf,
Seq(
Row("data", "string", null),
Row("id", "bigint", null)))
}
}
test("Describing a partition is not supported") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing " +
"PARTITIONED BY (id)")
val e = intercept[AnalysisException] {
sql(s"DESCRIBE TABLE $tbl PARTITION (id = 1)")
}
assert(e.message === "Partition not found in table 'table' database 'ns':\nid -> 1")
}
}
}
/**
* The class contains tests for the `DESCRIBE TABLE` command to check V1 In-Memory
* table catalog.
*/
class DescribeTableSuite extends DescribeTableSuiteBase with CommandSuiteBase {
override def commandVersion: String = super[DescribeTableSuiteBase].commandVersion
test("DESCRIBE TABLE EXTENDED of a partitioned table") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing" +
" PARTITIONED BY (id)" +
" TBLPROPERTIES ('bar'='baz')" +
" COMMENT 'this is a test table'" +
" LOCATION 'file:/tmp/testcat/table_name'")
val descriptionDf = spark.sql(s"DESCRIBE TABLE EXTENDED $tbl")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) === Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
QueryTest.checkAnswer(
descriptionDf.filter("col_name != 'Created Time'"),
Seq(
Row("data", "string", null),
Row("id", "bigint", null),
Row("# Partition Information", "", ""),
Row("# col_name", "data_type", "comment"),
Row("id", "bigint", null),
Row("", "", ""),
Row("# Detailed Table Information", "", ""),
Row("Database", "ns", ""),
Row("Table", "table", ""),
Row("Last Access", "UNKNOWN", ""),
Row("Created By", "Spark 3.4.0-SNAPSHOT", ""),
Row("Type", "EXTERNAL", ""),
Row("Provider", "parquet", ""),
Row("Comment", "this is a test table", ""),
Row("Table Properties", "[bar=baz]", ""),
Row("Location", "file:/tmp/testcat/table_name", ""),
Row("Partition Provider", "Catalog", "")))
}
}
}


@@ -0,0 +1,96 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.v2
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.connector.catalog.TableCatalog
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.types.StringType
import org.apache.spark.util.Utils
/**
* The class contains tests for the `DESCRIBE TABLE` command to check V2 table catalogs.
*/
class DescribeTableSuite extends command.DescribeTableSuiteBase with CommandSuiteBase {
test("DESCRIBE TABLE with non-'partitioned-by' clause") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing")
val descriptionDf = spark.sql(s"DESCRIBE TABLE $tbl")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) ===
Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
QueryTest.checkAnswer(
descriptionDf,
Seq(
Row("data", "string", ""),
Row("id", "bigint", ""),
Row("", "", ""),
Row("# Partitioning", "", ""),
Row("Not partitioned", "", "")))
}
}
test("Describing a partition is not supported") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing " +
"PARTITIONED BY (id)")
val e = intercept[AnalysisException] {
sql(s"DESCRIBE TABLE $tbl PARTITION (id = 1)")
}
assert(e.message === "DESCRIBE does not support partition for v2 tables.")
}
}
test("DESCRIBE TABLE EXTENDED of a partitioned table") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing" +
" PARTITIONED BY (id)" +
" TBLPROPERTIES ('bar'='baz')" +
" COMMENT 'this is a test table'" +
" LOCATION 'file:/tmp/testcat/table_name'")
val descriptionDf = spark.sql(s"DESCRIBE TABLE EXTENDED $tbl")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) === Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
QueryTest.checkAnswer(
descriptionDf,
Seq(
Row("id", "bigint", ""),
Row("data", "string", ""),
Row("", "", ""),
Row("# Partitioning", "", ""),
Row("Part 0", "id", ""),
Row("", "", ""),
Row("# Metadata Columns", "", ""),
Row("index", "int", "Metadata column used to conflict with a data column"),
Row("_partition", "string", "Partition key used to store the row"),
Row("", "", ""),
Row("# Detailed Table Information", "", ""),
Row("Name", tbl, ""),
Row("Comment", "this is a test table", ""),
Row("Location", "file:/tmp/testcat/table_name", ""),
Row("Provider", "_", ""),
Row(TableCatalog.PROP_OWNER.capitalize, Utils.getCurrentUserName(), ""),
Row("Table Properties", "[bar=baz]", "")))
}
}
}


@@ -378,17 +378,6 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
catalog.reset()
}
}
test("Table Ownership") {
val catalog = spark.sessionState.catalog
try {
sql(s"CREATE TABLE spark_30019(k int)")
assert(sql(s"DESCRIBE TABLE EXTENDED spark_30019").where("col_name='Owner'")
.collect().head.getString(1) === Utils.getCurrentUserName())
} finally {
catalog.reset()
}
}
}
@SlowHiveTest


@@ -0,0 +1,85 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution.command
import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.connector.catalog.TableCatalog
import org.apache.spark.sql.execution.command.v1
import org.apache.spark.sql.types.StringType
import org.apache.spark.util.Utils
/**
* The class contains tests for the `DESCRIBE TABLE` command to check V1 Hive external
* table catalog.
*/
class DescribeTableSuite extends v1.DescribeTableSuiteBase with CommandSuiteBase {
override def commandVersion: String = super[DescribeTableSuiteBase].commandVersion
test("Table Ownership") {
withNamespaceAndTable("ns", "tbl") { t =>
sql(s"CREATE TABLE $t (c int) $defaultUsing")
checkHiveClientCalls(expected = 6) {
checkAnswer(
sql(s"DESCRIBE TABLE EXTENDED $t")
.where("col_name='Owner'")
.select("col_name", "data_type"),
Row("Owner", Utils.getCurrentUserName()))
}
}
}
test("DESCRIBE TABLE EXTENDED of a partitioned table") {
withNamespaceAndTable("ns", "table") { tbl =>
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing" +
" PARTITIONED BY (id)" +
" COMMENT 'this is a test table'" +
" LOCATION 'file:/tmp/testcat/table_name'")
val descriptionDf = spark.sql(s"DESCRIBE TABLE EXTENDED $tbl")
assert(descriptionDf.schema.map(field => (field.name, field.dataType)) === Seq(
("col_name", StringType),
("data_type", StringType),
("comment", StringType)))
QueryTest.checkAnswer(
// Filter out 'Table Properties' to avoid checking `transient_lastDdlTime`
descriptionDf.filter("col_name != 'Created Time' and col_name != 'Table Properties'"),
Seq(
Row("data", "string", null),
Row("id", "bigint", null),
Row("# Partition Information", "", ""),
Row("# col_name", "data_type", "comment"),
Row("id", "bigint", null),
Row("", "", ""),
Row("# Detailed Table Information", "", ""),
Row("Database", "ns", ""),
Row("Table", "table", ""),
Row(TableCatalog.PROP_OWNER.capitalize, Utils.getCurrentUserName(), ""),
Row("Last Access", "UNKNOWN", ""),
Row("Created By", "Spark 3.4.0-SNAPSHOT", ""),
Row("Type", "EXTERNAL", ""),
Row("Provider", "hive", ""),
Row("Comment", "this is a test table", ""),
Row("Location", "file:/tmp/testcat/table_name", ""),
Row("Serde Library", "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", ""),
Row("InputFormat", "org.apache.hadoop.mapred.TextInputFormat", ""),
Row("OutputFormat", "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", ""),
Row("Storage Properties", "[serialization.format=1]", ""),
Row("Partition Provider", "Catalog", "")))
}
}
}