[SPARK-29612][SQL] ALTER TABLE (RECOVER PARTITIONS) should look up catalog/table like v2 commands

### What changes were proposed in this pull request?
Add AlterTableRecoverPartitionsStatement and make ALTER TABLE ... RECOVER PARTITIONS go through the same catalog/table resolution framework as v2 commands.
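
In outline, the parser now produces an unresolved statement node and a later analyzer rule turns it into the existing v1 command. A minimal self-contained sketch of that two-step shape (simplified stand-ins, not Spark's real classes; the session-catalog check is illustrative only):
```scala
// Parse step: the command becomes a statement that only records the multi-part name.
sealed trait ParsedStatement
case class AlterTableRecoverPartitionsStatement(tableName: Seq[String]) extends ParsedStatement

// Resolution step (rough model of ResolveSessionCatalog): names that belong to the
// session (v1) catalog map to the existing v1 command, everything else is rejected.
def resolve(stmt: ParsedStatement, isSessionCatalogName: Boolean): String = stmt match {
  case AlterTableRecoverPartitionsStatement(name) if isSessionCatalogName =>
    s"AlterTableRecoverPartitionsCommand(${name.mkString(".")})"
  case _: AlterTableRecoverPartitionsStatement =>
    throw new UnsupportedOperationException(
      "ALTER TABLE RECOVER PARTITIONS is only supported with v1 tables")
}
```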

### Why are the changes needed?
It's important that all the commands have the same table resolution behavior, to avoid confusing end users. For example:
```
USE my_catalog
DESC t // succeeds and describes table t from my_catalog
ALTER TABLE t RECOVER PARTITIONS  // reports "table not found" because it only looks for t in the session catalog
```

### Does this PR introduce any user-facing change?
Yes. When running ALTER TABLE ... RECOVER PARTITIONS, Spark now fails the command if the current catalog is set to a v2 catalog, or if the table name specifies a v2 catalog.
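
For example, assuming a v2 catalog is registered as `testcat` (as in the new DataSourceV2SQLSuite test below), the command now fails during resolution instead of being looked up in the session catalog; a rough spark-shell sketch:
```scala
// testcat is assumed to be configured via spark.sql.catalog.testcat, as in the test suite.
spark.sql("CREATE TABLE testcat.ns1.ns2.tbl (id BIGINT, data STRING) USING foo")

spark.sql("ALTER TABLE testcat.ns1.ns2.tbl RECOVER PARTITIONS")
// org.apache.spark.sql.AnalysisException:
//   ALTER TABLE RECOVER PARTITIONS is only supported with v1 tables
```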

### How was this patch tested?
Unit tests.

Closes #26269 from huaxingao/spark-29612.

Authored-by: Huaxin Gao <huaxing@us.ibm.com>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>


@@ -170,7 +170,7 @@ statement
         DROP (IF EXISTS)? partitionSpec (',' partitionSpec)* #dropTablePartitions
     | ALTER TABLE multipartIdentifier SET locationSpec #setTableLocation
     | ALTER TABLE tableIdentifier partitionSpec SET locationSpec #setPartitionLocation
-    | ALTER TABLE tableIdentifier RECOVER PARTITIONS #recoverPartitions
+    | ALTER TABLE multipartIdentifier RECOVER PARTITIONS #recoverPartitions
     | DROP TABLE (IF EXISTS)? multipartIdentifier PURGE? #dropTable
     | DROP VIEW (IF EXISTS)? multipartIdentifier #dropView
     | CREATE (OR REPLACE)? (GLOBAL? TEMPORARY)?


@@ -2911,4 +2911,17 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   override def visitRefreshTable(ctx: RefreshTableContext): LogicalPlan = withOrigin(ctx) {
     RefreshTableStatement(visitMultipartIdentifier(ctx.multipartIdentifier()))
   }
+
+  /**
+   * Create an [[AlterTableRecoverPartitionsStatement]]
+   *
+   * For example:
+   * {{{
+   *   ALTER TABLE multi_part_name RECOVER PARTITIONS;
+   * }}}
+   */
+  override def visitRecoverPartitions(
+      ctx: RecoverPartitionsContext): LogicalPlan = withOrigin(ctx) {
+    AlterTableRecoverPartitionsStatement(visitMultipartIdentifier(ctx.multipartIdentifier))
+  }
 }
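
Together with the grammar change above, the parser now accepts catalog-qualified names and keeps the whole multi-part name in the statement; a quick sketch of what the new rule produces (mirrors the catalyst DDLParserSuite test further down):
```scala
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

// No catalog or table lookup happens at parse time; the resulting plan is an
// AlterTableRecoverPartitionsStatement carrying all of the name parts, e.g. Seq("a", "b", "c").
val plan = CatalystSqlParser.parsePlan("ALTER TABLE a.b.c RECOVER PARTITIONS")
println(plan.getClass.getSimpleName)  // AlterTableRecoverPartitionsStatement
```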


@@ -181,6 +181,12 @@ case class AlterTableSetLocationStatement(
     tableName: Seq[String],
     location: String) extends ParsedStatement
 
+/**
+ * ALTER TABLE ... RECOVER PARTITIONS command, as parsed from SQL.
+ */
+case class AlterTableRecoverPartitionsStatement(
+    tableName: Seq[String]) extends ParsedStatement
+
 /**
  * ALTER VIEW ... SET TBLPROPERTIES command, as parsed from SQL.
  */


@@ -1169,6 +1169,12 @@ class DDLParserSuite extends AnalysisTest {
       RefreshTableStatement(Seq("a", "b", "c")))
   }
 
+  test("alter table: recover partitions") {
+    comparePlans(
+      parsePlan("ALTER TABLE a.b.c RECOVER PARTITIONS"),
+      AlterTableRecoverPartitionsStatement(Seq("a", "b", "c")))
+  }
+
   private case class TableSpec(
       name: Seq[String],
       schema: Option[StructType],


@@ -338,6 +338,12 @@ class ResolveSessionCatalog(
       ShowPartitionsCommand(
         v1TableName.asTableIdentifier,
         partitionSpec)
+
+    case AlterTableRecoverPartitionsStatement(tableName) =>
+      val v1TableName = parseV1Table(tableName, "ALTER TABLE RECOVER PARTITIONS")
+      AlterTableRecoverPartitionsCommand(
+        v1TableName.asTableIdentifier,
+        "ALTER TABLE RECOVER PARTITIONS")
   }
 
   private def parseV1Table(tableName: Seq[String], sql: String): Seq[String] = {


@@ -515,19 +515,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
       retainData = false)
   }
 
-  /**
-   * Create an [[AlterTableRecoverPartitionsCommand]] command
-   *
-   * For example:
-   * {{{
-   *   ALTER TABLE table RECOVER PARTITIONS;
-   * }}}
-   */
-  override def visitRecoverPartitions(
-      ctx: RecoverPartitionsContext): LogicalPlan = withOrigin(ctx) {
-    AlterTableRecoverPartitionsCommand(visitTableIdentifier(ctx.tableIdentifier))
-  }
-
   /**
    * Create an [[AlterTableSetLocationCommand]] command for a partition.
    *


@@ -1358,6 +1358,17 @@ class DataSourceV2SQLSuite
     }
   }
 
+  test("ALTER TABLE RECOVER PARTITIONS") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
+      val e = intercept[AnalysisException] {
+        val partition = sql(s"ALTER TABLE $t RECOVER PARTITIONS")
+      }
+      assert(e.message.contains("ALTER TABLE RECOVER PARTITIONS is only supported with v1 tables"))
+    }
+  }
+
   private def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
     val e = intercept[AnalysisException] {
       sql(s"$sqlCommand $sqlParams")


@@ -549,14 +549,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
     comparePlans(parsed2, expected2)
   }
 
-  test("alter table: recover partitions") {
-    val sql = "ALTER TABLE table_name RECOVER PARTITIONS"
-    val parsed = parser.parsePlan(sql)
-    val expected = AlterTableRecoverPartitionsCommand(
-      TableIdentifier("table_name", None))
-    comparePlans(parsed, expected)
-  }
-
   test("alter view: add partition (not supported)") {
     assertUnsupported(
       """