[SPARK-27813][SQL] DataSourceV2: Add DropTable logical operation

## What changes were proposed in this pull request?

Support DROP TABLE from V2 catalogs:
- Move DROP TABLE into catalyst.
- Move parsing tests for DROP TABLE/VIEW to PlanResolutionSuite to validate existing behavior.
- Add new tests to the catalyst parser suite.
- Separate DROP VIEW into a code path distinct from DROP TABLE.
- Move DROP VIEW into catalyst as a new operator.
- Add a meaningful exception to indicate that views are not yet supported in v2 catalogs.
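
For illustration, a hedged sketch of the user-visible behavior after this change; `testcat` stands for a v2 catalog registered under `spark.sql.catalog.testcat`, as in the SQL tests below:

```scala
// Multi-part names that resolve to a v2 catalog take the new
// DropTable -> DropTableExec path; other names fall back to the
// existing v1 DropTableCommand.
spark.sql("DROP TABLE testcat.ns1.ns2.tbl")     // v2 path
spark.sql("DROP TABLE IF EXISTS db.tab PURGE")  // v1 path
```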

## How was this patch tested?

- New unit tests.
- Existing unit tests in catalyst and sql core.

Closes #24686 from jzhuge/SPARK-27813-pr.

Authored-by: John Zhuge <jzhuge@apache.org>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
John Zhuge authored on 2019-05-31 00:56:07 +08:00; committed by Wenchen Fan
commit a44b00dfe0 (parent bd87323003)
14 changed files with 294 additions and 81 deletions


@@ -139,8 +139,8 @@ statement
DROP (IF EXISTS)? partitionSpec (',' partitionSpec)* #dropTablePartitions
| ALTER TABLE tableIdentifier partitionSpec? SET locationSpec #setTableLocation
| ALTER TABLE tableIdentifier RECOVER PARTITIONS #recoverPartitions
| DROP TABLE (IF EXISTS)? tableIdentifier PURGE? #dropTable
| DROP VIEW (IF EXISTS)? tableIdentifier #dropTable
| DROP TABLE (IF EXISTS)? multipartIdentifier PURGE? #dropTable
| DROP VIEW (IF EXISTS)? multipartIdentifier #dropView
| CREATE (OR REPLACE)? (GLOBAL? TEMPORARY)?
VIEW (IF NOT EXISTS)? tableIdentifier
identifierCommentList?


@@ -22,6 +22,8 @@ import org.apache.spark.annotation.Experimental;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* An {@link Identifier} implementation.
@@ -49,6 +51,21 @@ class IdentifierImpl implements Identifier {
return name;
}
private String escapeQuote(String part) {
if (part.contains("`")) {
return part.replace("`", "``");
} else {
return part;
}
}
@Override
public String toString() {
return Stream.concat(Stream.of(namespace), Stream.of(name))
.map(part -> '`' + escapeQuote(part) + '`')
.collect(Collectors.joining("."));
}
@Override
public boolean equals(Object o) {
if (this == o) {

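A worked example of the quoting added above (a sketch; `Identifier.of` is the factory used elsewhere in this PR): each part is wrapped in backticks and embedded backticks are doubled, so dots or backticks inside a part stay unambiguous.

```scala
val id = Identifier.of(Array("ns1", "a`b"), "t.x")
id.toString  // `ns1`.`a``b`.`t.x`
```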

@@ -38,7 +38,7 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last}
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement}
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement}
import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, stringToDate, stringToTimestamp}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
@@ -2195,4 +2195,22 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
}
}
/**
* Create a [[DropTableStatement]] command.
*/
override def visitDropTable(ctx: DropTableContext): LogicalPlan = withOrigin(ctx) {
DropTableStatement(
visitMultipartIdentifier(ctx.multipartIdentifier()),
ctx.EXISTS != null,
ctx.PURGE != null)
}
/**
* Create a [[DropViewStatement]] command.
*/
override def visitDropView(ctx: DropViewContext): AnyRef = withOrigin(ctx) {
DropViewStatement(
visitMultipartIdentifier(ctx.multipartIdentifier()),
ctx.EXISTS != null)
}
}
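
For reference, what the new visitors produce (a sketch mirroring the DDLParserSuite cases added later in this diff; `parsePlan` is the parser entry point those tests use):

```scala
parsePlan("DROP TABLE IF EXISTS testcat.ns1.ns2.tbl PURGE")
// => DropTableStatement(Seq("testcat", "ns1", "ns2", "tbl"), ifExists = true, purge = true)
parsePlan("DROP VIEW db.view")
// => DropViewStatement(Seq("db", "view"), ifExists = false)
```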


@@ -499,6 +499,14 @@ object OverwritePartitionsDynamic {
}
}
/**
* Drop a table.
*/
case class DropTable(
catalog: TableCatalog,
ident: Identifier,
ifExists: Boolean) extends Command
/**
* Insert some data into a table. Note that this plan is unresolved and has to be replaced by the


@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical.sql
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
/**
* A DROP TABLE statement, as parsed from SQL.
*/
case class DropTableStatement(
tableName: Seq[String],
ifExists: Boolean,
purge: Boolean) extends ParsedStatement {
override def output: Seq[Attribute] = Seq.empty
override def children: Seq[LogicalPlan] = Seq.empty
}


@@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical.sql
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
/**
* A DROP VIEW statement, as parsed from SQL.
*/
case class DropViewStatement(
viewName: Seq[String],
ifExists: Boolean) extends ParsedStatement {
override def output: Seq[Attribute] = Seq.empty
override def children: Seq[LogicalPlan] = Seq.empty
}


@@ -20,7 +20,8 @@ package org.apache.spark.sql.catalyst.parser
import org.apache.spark.sql.catalog.v2.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, YearsTransform}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement}
import org.apache.spark.sql.types.{IntegerType, StringType, StructType, TimestampType}
import org.apache.spark.unsafe.types.UTF8String
@@ -34,6 +35,10 @@ class DDLParserSuite extends AnalysisTest {
}
}
private def parseCompare(sql: String, expected: LogicalPlan): Unit = {
comparePlans(parsePlan(sql), expected, checkAnalysis = false)
}
test("create table using - schema") {
val sql = "CREATE TABLE my_tab(a INT COMMENT 'test', b STRING) USING parquet"
@@ -362,4 +367,31 @@ class DDLParserSuite extends AnalysisTest {
}
}
}
test("drop table") {
parseCompare("DROP TABLE testcat.ns1.ns2.tbl",
DropTableStatement(Seq("testcat", "ns1", "ns2", "tbl"), ifExists = false, purge = false))
parseCompare(s"DROP TABLE db.tab",
DropTableStatement(Seq("db", "tab"), ifExists = false, purge = false))
parseCompare(s"DROP TABLE IF EXISTS db.tab",
DropTableStatement(Seq("db", "tab"), ifExists = true, purge = false))
parseCompare(s"DROP TABLE tab",
DropTableStatement(Seq("tab"), ifExists = false, purge = false))
parseCompare(s"DROP TABLE IF EXISTS tab",
DropTableStatement(Seq("tab"), ifExists = true, purge = false))
parseCompare(s"DROP TABLE tab PURGE",
DropTableStatement(Seq("tab"), ifExists = false, purge = true))
parseCompare(s"DROP TABLE IF EXISTS tab PURGE",
DropTableStatement(Seq("tab"), ifExists = true, purge = true))
}
test("drop view") {
parseCompare(s"DROP VIEW testcat.db.view",
DropViewStatement(Seq("testcat", "db", "view"), ifExists = false))
parseCompare(s"DROP VIEW db.view", DropViewStatement(Seq("db", "view"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS db.view",
DropViewStatement(Seq("db", "view"), ifExists = true))
parseCompare(s"DROP VIEW view", DropViewStatement(Seq("view"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS view", DropViewStatement(Seq("view"), ifExists = true))
}
}


@@ -645,17 +645,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
ctx.TEMPORARY != null)
}
/**
* Create a [[DropTableCommand]] command.
*/
override def visitDropTable(ctx: DropTableContext): LogicalPlan = withOrigin(ctx) {
DropTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
ctx.EXISTS != null,
ctx.VIEW != null,
ctx.PURGE != null)
}
/**
* Create a [[AlterTableRenameCommand]] command.
*


@@ -27,9 +27,10 @@ import org.apache.spark.sql.catalog.v2.expressions.Transform
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.CastSupport
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTableType, CatalogUtils}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.logical.sql.{CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.command.DropTableCommand
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.v2.TableProvider
import org.apache.spark.sql.types.StructType
@@ -83,6 +84,20 @@ case class DataSourceResolution(
s"No catalog specified for table ${identifier.quoted} and no default catalog is set"))
.asTableCatalog
convertCTAS(catalog, identifier, create)
case DropTableStatement(CatalogObjectIdentifier(Some(catalog), ident), ifExists, _) =>
DropTable(catalog.asTableCatalog, ident, ifExists)
case DropTableStatement(AsTableIdentifier(tableName), ifExists, purge) =>
DropTableCommand(tableName, ifExists, isView = false, purge)
case DropViewStatement(CatalogObjectIdentifier(Some(catalog), ident), _) =>
throw new AnalysisException(
s"Can not specify catalog `${catalog.name}` for view $ident " +
s"because view support in catalog has not been implemented yet")
case DropViewStatement(AsTableIdentifier(tableName), ifExists) =>
DropTableCommand(tableName, ifExists, isView = true, purge = false)
}
object V1WriteProvider {

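Note that the view branch above fails fast instead of silently routing a view to a table catalog. A hedged sketch of the resulting error (message text from the rule above; the identifier is rendered by the new `IdentifierImpl.toString`):

```scala
spark.sql("DROP VIEW testcat.db.v")
// org.apache.spark.sql.AnalysisException: Can not specify catalog `testcat`
// for view `db`.`v` because view support in catalog has not been implemented yet
```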

@@ -23,7 +23,7 @@ import scala.collection.mutable
import org.apache.spark.sql.{AnalysisException, Strategy}
import org.apache.spark.sql.catalyst.expressions.{And, AttributeReference, AttributeSet, Expression, PredicateHelper, SubqueryExpression}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, CreateV2Table, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, Repartition}
import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, Repartition}
import org.apache.spark.sql.execution.{FilterExec, ProjectExec, SparkPlan}
import org.apache.spark.sql.execution.datasources.DataSourceStrategy
import org.apache.spark.sql.execution.streaming.continuous.{ContinuousCoalesceExec, WriteToContinuousDataSource, WriteToContinuousDataSourceExec}
@@ -199,6 +199,9 @@ object DataSourceV2Strategy extends Strategy with PredicateHelper {
Nil
}
case DropTable(catalog, ident, ifExists) =>
DropTableExec(catalog, ident, ifExists) :: Nil
case _ => Nil
}
}


@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalog.v2.{Identifier, TableCatalog}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.execution.LeafExecNode
/**
* Physical plan node for dropping a table.
*/
case class DropTableExec(catalog: TableCatalog, ident: Identifier, ifExists: Boolean)
extends LeafExecNode {
override def doExecute(): RDD[InternalRow] = {
if (catalog.tableExists(ident)) {
catalog.dropTable(ident)
} else if (!ifExists) {
throw new NoSuchTableException(ident)
}
sqlContext.sparkContext.parallelize(Seq.empty, 1)
}
override def output: Seq[Attribute] = Seq.empty
}
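
Worth noting the division of labor: the exec node consults `tableExists` and raises `NoSuchTableException` only when IF EXISTS was absent, so a catalog needs nothing beyond an unconditional `dropTable`. The contract in miniature (plain Scala, not Spark API; names hypothetical):

```scala
import scala.collection.mutable

val tables = mutable.Set("ns1.tbl")  // hypothetical stand-in for a catalog's registry

def dropTable(name: String, ifExists: Boolean): Unit =
  if (!tables.remove(name) && !ifExists)
    sys.error(s"Table or view not found: $name")

dropTable("ns1.tbl", ifExists = false)  // drops it
dropTable("ns1.tbl", ifExists = true)   // already gone: silent no-op
```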


@@ -32,13 +32,12 @@ import org.apache.spark.sql.catalyst.dsl.plans.DslLogicalPlan
import org.apache.spark.sql.catalyst.expressions.JsonTuple
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{Generate, InsertIntoDir, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.logical.{Project, ScriptTransformation}
import org.apache.spark.sql.catalyst.plans.logical.{Generate, InsertIntoDir, LogicalPlan, Project, ScriptTransformation}
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.execution.datasources.CreateTable
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
class DDLParserSuite extends PlanTest with SharedSQLContext {
private lazy val parser = new SparkSqlParser(new SQLConf)
@@ -903,64 +902,6 @@ class DDLParserSuite extends PlanTest with SharedSQLContext {
assert(e.contains("Found an empty partition key 'b'"))
}
test("drop table") {
val tableName1 = "db.tab"
val tableName2 = "tab"
val parsed = Seq(
s"DROP TABLE $tableName1",
s"DROP TABLE IF EXISTS $tableName1",
s"DROP TABLE $tableName2",
s"DROP TABLE IF EXISTS $tableName2",
s"DROP TABLE $tableName2 PURGE",
s"DROP TABLE IF EXISTS $tableName2 PURGE"
).map(parser.parsePlan)
val expected = Seq(
DropTableCommand(TableIdentifier("tab", Option("db")), ifExists = false, isView = false,
purge = false),
DropTableCommand(TableIdentifier("tab", Option("db")), ifExists = true, isView = false,
purge = false),
DropTableCommand(TableIdentifier("tab", None), ifExists = false, isView = false,
purge = false),
DropTableCommand(TableIdentifier("tab", None), ifExists = true, isView = false,
purge = false),
DropTableCommand(TableIdentifier("tab", None), ifExists = false, isView = false,
purge = true),
DropTableCommand(TableIdentifier("tab", None), ifExists = true, isView = false,
purge = true))
parsed.zip(expected).foreach { case (p, e) => comparePlans(p, e) }
}
test("drop view") {
val viewName1 = "db.view"
val viewName2 = "view"
val parsed1 = parser.parsePlan(s"DROP VIEW $viewName1")
val parsed2 = parser.parsePlan(s"DROP VIEW IF EXISTS $viewName1")
val parsed3 = parser.parsePlan(s"DROP VIEW $viewName2")
val parsed4 = parser.parsePlan(s"DROP VIEW IF EXISTS $viewName2")
val expected1 =
DropTableCommand(TableIdentifier("view", Option("db")), ifExists = false, isView = true,
purge = false)
val expected2 =
DropTableCommand(TableIdentifier("view", Option("db")), ifExists = true, isView = true,
purge = false)
val expected3 =
DropTableCommand(TableIdentifier("view", None), ifExists = false, isView = true,
purge = false)
val expected4 =
DropTableCommand(TableIdentifier("view", None), ifExists = true, isView = true,
purge = false)
comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
comparePlans(parsed3, expected3)
comparePlans(parsed4, expected4)
}
test("show columns") {
val sql1 = "SHOW COLUMNS FROM t1"
val sql2 = "SHOW COLUMNS IN db1.t1"


@@ -18,6 +18,7 @@
package org.apache.spark.sql.execution.command
import java.net.URI
import java.util.Locale
import org.apache.spark.sql.{AnalysisException, SaveMode}
import org.apache.spark.sql.catalog.v2.{CatalogNotFoundException, CatalogPlugin, Identifier, TableCatalog, TestTableCatalog}
@@ -25,7 +26,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan}
import org.apache.spark.sql.execution.datasources.{CreateTable, DataSourceResolution}
import org.apache.spark.sql.execution.datasources.v2.orc.OrcDataSourceV2
import org.apache.spark.sql.types.{DoubleType, IntegerType, LongType, StringType, StructType}
@@ -55,6 +56,9 @@ class PlanResolutionSuite extends AnalysisTest {
DataSourceResolution(newConf, lookupCatalog).apply(parsePlan(query))
}
private def parseResolveCompare(query: String, expected: LogicalPlan): Unit =
comparePlans(parseAndResolve(query), expected, checkAnalysis = true)
private def extractTableDesc(sql: String): (CatalogTable, Boolean) = {
parseAndResolve(sql).collect {
case CreateTable(tableDesc, mode, _) => (tableDesc, mode == SaveMode.Ignore)
@@ -438,4 +442,63 @@
s"got ${other.getClass.getName}: $sql")
}
}
test("drop table") {
val tableName1 = "db.tab"
val tableIdent1 = TableIdentifier("tab", Option("db"))
val tableName2 = "tab"
val tableIdent2 = TableIdentifier("tab", None)
parseResolveCompare(s"DROP TABLE $tableName1",
DropTableCommand(tableIdent1, ifExists = false, isView = false, purge = false))
parseResolveCompare(s"DROP TABLE IF EXISTS $tableName1",
DropTableCommand(tableIdent1, ifExists = true, isView = false, purge = false))
parseResolveCompare(s"DROP TABLE $tableName2",
DropTableCommand(tableIdent2, ifExists = false, isView = false, purge = false))
parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2",
DropTableCommand(tableIdent2, ifExists = true, isView = false, purge = false))
parseResolveCompare(s"DROP TABLE $tableName2 PURGE",
DropTableCommand(tableIdent2, ifExists = false, isView = false, purge = true))
parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2 PURGE",
DropTableCommand(tableIdent2, ifExists = true, isView = false, purge = true))
}
test("drop table in v2 catalog") {
val tableName1 = "testcat.db.tab"
val tableIdent1 = Identifier.of(Array("db"), "tab")
val tableName2 = "testcat.tab"
val tableIdent2 = Identifier.of(Array.empty, "tab")
parseResolveCompare(s"DROP TABLE $tableName1",
DropTable(testCat, tableIdent1, ifExists = false))
parseResolveCompare(s"DROP TABLE IF EXISTS $tableName1",
DropTable(testCat, tableIdent1, ifExists = true))
parseResolveCompare(s"DROP TABLE $tableName2",
DropTable(testCat, tableIdent2, ifExists = false))
parseResolveCompare(s"DROP TABLE IF EXISTS $tableName2",
DropTable(testCat, tableIdent2, ifExists = true))
}
test("drop view") {
val viewName1 = "db.view"
val viewIdent1 = TableIdentifier("view", Option("db"))
val viewName2 = "view"
val viewIdent2 = TableIdentifier("view")
parseResolveCompare(s"DROP VIEW $viewName1",
DropTableCommand(viewIdent1, ifExists = false, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW IF EXISTS $viewName1",
DropTableCommand(viewIdent1, ifExists = true, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW $viewName2",
DropTableCommand(viewIdent2, ifExists = false, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW IF EXISTS $viewName2",
DropTableCommand(viewIdent2, ifExists = true, isView = true, purge = false))
}
test("drop view in v2 catalog") {
intercept[AnalysisException] {
parseAndResolve("DROP VIEW testcat.db.view")
}.getMessage.toLowerCase(Locale.ROOT).contains(
"view support in catalog has not been implemented")
}
}


@@ -23,7 +23,7 @@ import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.catalog.v2.Identifier
import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
import org.apache.spark.sql.catalyst.analysis.{NoSuchTableException, TableAlreadyExistsException}
import org.apache.spark.sql.execution.datasources.v2.orc.OrcDataSourceV2
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.{LongType, StringType, StructType}
@@ -266,4 +266,20 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAndAfter
conf.setConfString("spark.sql.default.catalog", originalDefaultCatalog)
}
}
test("DropTable: basic") {
val tableName = "testcat.ns1.ns2.tbl"
val ident = Identifier.of(Array("ns1", "ns2"), "tbl")
sql(s"CREATE TABLE $tableName USING foo AS SELECT id, data FROM source")
assert(spark.catalog("testcat").asTableCatalog.tableExists(ident) === true)
sql(s"DROP TABLE $tableName")
assert(spark.catalog("testcat").asTableCatalog.tableExists(ident) === false)
}
test("DropTable: if exists") {
intercept[NoSuchTableException] {
sql(s"DROP TABLE testcat.db.notbl")
}
sql(s"DROP TABLE IF EXISTS testcat.db.notbl")
}
}