diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 61ac6346ff..64496a9538 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -996,12 +996,12 @@ trait CheckAnalysis extends PredicateHelper {
   private def checkAlterTablePartition(
       table: Table, parts: Seq[PartitionSpec]): Unit = {
     (table, parts) match {
-      case (_, parts) if parts.exists(_.isInstanceOf[UnresolvedPartitionSpec]) =>
-        failAnalysis("PartitionSpecs are not resolved")
-
       case (table, _) if !table.isInstanceOf[SupportsPartitionManagement] =>
         failAnalysis(s"Table ${table.name()} can not alter partitions.")
 
+      case (_, parts) if parts.exists(_.isInstanceOf[UnresolvedPartitionSpec]) =>
+        failAnalysis("PartitionSpecs are not resolved")
+
       // Skip atomic partition tables
       case (_: SupportsAtomicPartitionManagement, _) =>
       case (_: SupportsPartitionManagement, parts) if parts.size > 1 =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala
index 3583eceec7..47b5e5e54e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala
@@ -245,4 +245,20 @@ class AlterTablePartitionV2SQLSuite extends DatasourceV2SQLBase {
       assert(!partTable.partitionExists(expectedPartition))
     }
   }
+
+  test("SPARK-33650: add/drop partition into a table which doesn't support partition management") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING _")
+      Seq(
+        s"ALTER TABLE $t ADD PARTITION (id=1)",
+        s"ALTER TABLE $t DROP PARTITION (id=1)"
+      ).foreach { alterTable =>
+        val errMsg = intercept[AnalysisException] {
+          spark.sql(alterTable)
+        }.getMessage
+        assert(errMsg.contains(s"Table $t can not alter partitions"))
+      }
+    }
+  }
 }