[MINOR][SQL] Combine the same codes in test cases

## What changes were proposed in this pull request?

In `DDLSuite`, four test cases share the same setup and cleanup code. Extracting that code into a single helper function removes the duplication.
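
For readers skimming the diff, the refactoring follows the common loan pattern for test fixtures. The sketch below is a simplified, self-contained illustration of that pattern, not the code from this PR: the real helper, `withEmptyDirInTablePath` in `DDLSuite`, resolves the directory from the session catalog's default table path and also calls `waitForTasksToFinish()` and `Utils.deleteRecursively` during cleanup. Names such as `withEmptyDir` and the local `deleteRecursively` are illustrative only.

```scala
import java.io.File

// Simplified sketch of the loan pattern this PR applies in DDLSuite:
// create an empty directory, hand it to the caller's body, and always
// clean it up afterwards, so each test only supplies the body.
object WithEmptyDirSketch {

  def withEmptyDir(path: String)(f: File => Unit): Unit = {
    val dir = new File(path)
    try {
      dir.mkdir()
      f(dir)
    } finally {
      // Stand-in for the Utils.deleteRecursively call used by the real helper.
      deleteRecursively(dir)
    }
  }

  private def deleteRecursively(file: File): Unit = {
    Option(file.listFiles()).foreach(_.foreach(deleteRecursively))
    file.delete()
  }

  def main(args: Array[String]): Unit = {
    withEmptyDir("tab1_sketch") { dir =>
      // The body sees the directory that the helper created.
      println(s"directory exists: ${dir.exists()}")
    }
  }
}
```

With this shape, the directory setup and teardown live in one place and each test case only contributes its own assertions.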

## How was this patch tested?

Existing tests.

Closes #23194 from CarolinePeng/Update_temp.

Authored-by: 彭灿00244106 <00244106@zte.intra>
Signed-off-by: Takeshi Yamamuro <yamamuro@apache.org>
Commit: 93f5592aa8 (parent: 2612848422)
Committed: 2018-12-04 22:08:16 +09:00

```diff
@@ -377,41 +377,41 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     }
   }
-  test("CTAS a managed table with the existing empty directory") {
-    val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
+  private def withEmptyDirInTablePath(dirName: String)(f : File => Unit): Unit = {
+    val tableLoc =
+      new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier(dirName)))
     try {
       tableLoc.mkdir()
-      withTable("tab1") {
-        sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
-        checkAnswer(spark.table("tab1"), Row(1, "a"))
-      }
+      f(tableLoc)
     } finally {
       waitForTasksToFinish()
       Utils.deleteRecursively(tableLoc)
     }
   }
+  test("CTAS a managed table with the existing empty directory") {
+    withEmptyDirInTablePath("tab1") { tableLoc =>
+      withTable("tab1") {
+        sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
+        checkAnswer(spark.table("tab1"), Row(1, "a"))
+      }
+    }
+  }
   test("create a managed table with the existing empty directory") {
-    val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
-    try {
-      tableLoc.mkdir()
+    withEmptyDirInTablePath("tab1") { tableLoc =>
       withTable("tab1") {
         sql(s"CREATE TABLE tab1 (col1 int, col2 string) USING ${dataSource}")
         sql("INSERT INTO tab1 VALUES (1, 'a')")
         checkAnswer(spark.table("tab1"), Row(1, "a"))
       }
-    } finally {
-      waitForTasksToFinish()
-      Utils.deleteRecursively(tableLoc)
     }
   }
   test("create a managed table with the existing non-empty directory") {
     withTable("tab1") {
-      val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab1")))
-      try {
-        // create an empty hidden file
-        tableLoc.mkdir()
+      withEmptyDirInTablePath("tab1") { tableLoc =>
         val hiddenGarbageFile = new File(tableLoc.getCanonicalPath, ".garbage")
         hiddenGarbageFile.createNewFile()
         val exMsg = "Can not create the managed table('`tab1`'). The associated location"
@@ -439,28 +439,20 @@
           }.getMessage
           assert(ex.contains(exMsgWithDefaultDB))
         }
-      } finally {
-        waitForTasksToFinish()
-        Utils.deleteRecursively(tableLoc)
       }
     }
   }
   test("rename a managed table with existing empty directory") {
-    val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier("tab2")))
-    try {
+    withEmptyDirInTablePath("tab2") { tableLoc =>
       withTable("tab1") {
         sql(s"CREATE TABLE tab1 USING $dataSource AS SELECT 1, 'a'")
-        tableLoc.mkdir()
         val ex = intercept[AnalysisException] {
          sql("ALTER TABLE tab1 RENAME TO tab2")
         }.getMessage
         val expectedMsg = "Can not rename the managed table('`tab1`'). The associated location"
         assert(ex.contains(expectedMsg))
       }
-    } finally {
-      waitForTasksToFinish()
-      Utils.deleteRecursively(tableLoc)
     }
   }
```