[SPARK-13079][SQL] InMemoryCatalog follow-ups

This patch incorporates review feedback from #11069, which is already merged.

Author: Andrew Or <andrew@databricks.com>

Closes #11080 from andrewor14/catalog-follow-ups.
This commit is contained in:
Andrew Or 2016-02-04 12:20:18 -08:00 committed by Reynold Xin
parent c756bda477
commit bd38dd6f75
2 changed files with 22 additions and 5 deletions

View file

@@ -39,6 +39,9 @@ abstract class Catalog {
def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit
/**
* Alter an existing database. This operation does not support renaming.
*/
def alterDatabase(db: String, dbDefinition: Database): Unit
def getDatabase(db: String): Database
@@ -57,6 +60,9 @@ abstract class Catalog {
def renameTable(db: String, oldName: String, newName: String): Unit
/**
* Alter an existing table. This operation does not support renaming.
*/
def alterTable(db: String, table: String, tableDefinition: Table): Unit
def getTable(db: String, table: String): Table
@@ -81,6 +87,9 @@ abstract class Catalog {
parts: Seq[PartitionSpec],
ignoreIfNotExists: Boolean): Unit
/**
* Alter an existing table partition and optionally override its spec.
*/
def alterPartition(
db: String,
table: String,
@@ -100,6 +109,9 @@ abstract class Catalog {
def dropFunction(db: String, funcName: String): Unit
/**
* Alter an existing function and optionally override its name.
*/
def alterFunction(db: String, funcName: String, funcDefinition: Function): Unit
def getFunction(db: String, funcName: String): Function
@@ -194,5 +206,8 @@ case class Database(
object Catalog {
/**
* Specifications of a table partition indexed by column name.
*/
type PartitionSpec = Map[String, String]
}

View file

@@ -27,10 +27,10 @@ import org.apache.spark.sql.AnalysisException
* Implementations of the [[Catalog]] interface can create test suites by extending this.
*/
abstract class CatalogTestCases extends SparkFunSuite {
-  private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map.empty[String, String])
-  private val part1 = TablePartition(Map[String, String]("a" -> "1"), storageFormat)
-  private val part2 = TablePartition(Map[String, String]("b" -> "2"), storageFormat)
-  private val part3 = TablePartition(Map[String, String]("c" -> "3"), storageFormat)
+  private val storageFormat = StorageFormat("usa", "$", "zzz", "serde", Map())
+  private val part1 = TablePartition(Map("a" -> "1"), storageFormat)
+  private val part2 = TablePartition(Map("b" -> "2"), storageFormat)
+  private val part3 = TablePartition(Map("c" -> "3"), storageFormat)
private val funcClass = "org.apache.spark.myFunc"
protected def newEmptyCatalog(): Catalog
@@ -42,6 +42,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
* db2
* - tbl1
* - tbl2
* - part1
* - part2
* - func1
*/
private def newBasicCatalog(): Catalog = {
@@ -50,8 +52,8 @@ abstract class CatalogTestCases extends SparkFunSuite {
catalog.createDatabase(newDb("db2"), ignoreIfExists = false)
catalog.createTable("db2", newTable("tbl1"), ignoreIfExists = false)
catalog.createTable("db2", newTable("tbl2"), ignoreIfExists = false)
-    catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
     catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists = false)
+    catalog.createFunction("db2", newFunc("func1"), ignoreIfExists = false)
catalog
}