[SPARK-16220][SQL] Revert Change to Bring Back SHOW FUNCTIONS Functionality

## What changes were proposed in this pull request?

- Fix tests regarding show functions functionality
- Revert `catalog.ListFunctions` and `SHOW FUNCTIONS` to return to `Spark 1.X` functionality.

Cherry picked changes from this PR: https://github.com/apache/spark/pull/13413/files

## How was this patch tested?

Unit tests.

Author: Bill Chambers <bill@databricks.com>
Author: Bill Chambers <wchambers@ischool.berkeley.edu>

Closes #13916 from anabranch/master.
This commit was authored by Bill Chambers on 2016-06-27 11:50:34 -07:00 and committed by Herman van Hovell.
parent 3e4e868c85
commit c48c8ebc0a
5 changed files with 22 additions and 9 deletions

View file

@@ -1509,7 +1509,17 @@ class SQLTests(ReusedPySparkTestCase):
spark.sql("CREATE DATABASE some_db")
functions = dict((f.name, f) for f in spark.catalog.listFunctions())
functionsDefault = dict((f.name, f) for f in spark.catalog.listFunctions("default"))
self.assertEquals(len(functions), 0)
self.assertTrue(len(functions) > 200)
self.assertTrue("+" in functions)
self.assertTrue("like" in functions)
self.assertTrue("month" in functions)
self.assertTrue("to_unix_timestamp" in functions)
self.assertTrue("current_database" in functions)
self.assertEquals(functions["+"], Function(
name="+",
description=None,
className="org.apache.spark.sql.catalyst.expressions.Add",
isTemporary=True))
self.assertEquals(functions, functionsDefault)
spark.catalog.registerFunction("temp_func", lambda x: str(x))
spark.sql("CREATE FUNCTION func1 AS 'org.apache.spark.data.bricks'")

View file

@@ -855,8 +855,7 @@ class SessionCatalog(
.map { f => FunctionIdentifier(f, Some(dbName)) }
val loadedFunctions = StringUtils.filterPattern(functionRegistry.listFunction(), pattern)
.map { f => FunctionIdentifier(f) }
(dbFunctions ++ loadedFunctions)
.filterNot(f => FunctionRegistry.functionSet.contains(f.funcName))
dbFunctions ++ loadedFunctions
}

View file

@@ -80,13 +80,10 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
val functions = Array("ilog", "logi", "logii", "logiii", "crc32i", "cubei", "cume_disti",
"isize", "ispace", "to_datei", "date_addi", "current_datei")
assert(sql("SHOW functions").collect().isEmpty)
createFunction(functions)
checkAnswer(sql("SHOW functions"), getFunctions("*"))
assert(sql("SHOW functions").collect().size === functions.size)
assert(sql("SHOW functions").collect().toSet === functions.map(Row(_)).toSet)
assert(sql("SHOW functions").collect().size > 200)
Seq("^c*", "*e$", "log*", "*date*").foreach { pattern =>
// For the pattern part, only '*' and '|' are allowed as wildcards.

View file

@@ -174,7 +174,8 @@ class CatalogSuite
}
test("list functions") {
assert(spark.catalog.listFunctions().collect().isEmpty)
assert(Set("+", "current_database", "window").subsetOf(
spark.catalog.listFunctions().collect().map(_.name).toSet))
createFunction("my_func1")
createFunction("my_func2")
createTempFunction("my_temp_func")
@@ -191,7 +192,8 @@ class CatalogSuite
}
test("list functions with database") {
assert(spark.catalog.listFunctions("default").collect().isEmpty)
assert(Set("+", "current_database", "window").subsetOf(
spark.catalog.listFunctions().collect().map(_.name).toSet))
createDatabase("my_db1")
createDatabase("my_db2")
createFunction("my_func1", Some("my_db1"))

View file

@@ -187,6 +187,11 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("show functions") {
val allBuiltinFunctions = FunctionRegistry.builtin.listFunction().toSet[String].toList.sorted
val allFunctions = sql("SHOW functions").collect().map(r => r(0))
allBuiltinFunctions.foreach { f =>
assert(allFunctions.contains(f))
}
withTempDatabase { db =>
def createFunction(names: Seq[String]): Unit = {
names.foreach { name =>