[SPARK-27383][SQL][TEST] Avoid using hard-coded jar names in Hive tests

## What changes were proposed in this pull request?

This PR avoids using hard-coded jar names (`hive-contrib-0.13.1.jar` and `hive-hcatalog-core-0.13.1.jar`) in Hive tests. This change makes it easier to update them when upgrading the built-in Hive to 2.3.4.

## How was this patch tested?

Existing tests

Closes #24294 from wangyum/SPARK-27383.

Authored-by: Yuming Wang <yumwang@ebay.com>
Signed-off-by: Sean Owen <sean.owen@databricks.com>
This commit is contained in:
Yuming Wang 2019-04-06 18:06:52 -05:00 committed by Sean Owen
parent 53e31e2ca1
commit 017919b636
4 changed files with 21 additions and 10 deletions

View file

@ -116,6 +116,9 @@ class TestHiveContext(
@transient override val sparkSession: TestHiveSparkSession)
extends SQLContext(sparkSession) {
// Single source of truth for the test-jar file names, so upgrading the
// built-in Hive version requires changing only these two constants.
// NOTE(review): values still encode Hive 0.13.1 — presumably bumped when
// the built-in Hive moves to 2.3.4; confirm against the build files.
val HIVE_CONTRIB_JAR: String = "hive-contrib-0.13.1.jar"
val HIVE_HCATALOG_CORE_JAR: String = "hive-hcatalog-core-0.13.1.jar"
/**
* If loadTestTables is false, no test tables are loaded. Note that this flag can only be true
* when running in the JVM, i.e. it needs to be false when calling from Python.
@ -142,6 +145,14 @@ class TestHiveContext(
sparkSession.getHiveFile(path)
}
/**
 * Resolves the bundled hive-contrib test jar (named by [[HIVE_CONTRIB_JAR]])
 * via `sparkSession.getHiveFile`, replacing the hard-coded name callers used before.
 */
def getHiveContribJar(): File = {
sparkSession.getHiveFile(HIVE_CONTRIB_JAR)
}
/**
 * Resolves the bundled hive-hcatalog-core test jar (named by
 * [[HIVE_HCATALOG_CORE_JAR]]) via `sparkSession.getHiveFile`.
 */
def getHiveHcatalogCoreJar(): File = {
sparkSession.getHiveFile(HIVE_HCATALOG_CORE_JAR)
}
// Thin delegate: loads the named test table through the underlying
// TestHiveSparkSession.
def loadTestTable(name: String): Unit = {
sparkSession.loadTestTable(name)
}

View file

@ -108,8 +108,8 @@ class HiveSparkSubmitSuite
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
val jar3 = TestHive.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
val jar4 = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").getCanonicalPath
val jar3 = TestHive.getHiveContribJar().getCanonicalPath
val jar4 = TestHive.getHiveHcatalogCoreJar().getCanonicalPath
val jarsString = Seq(jar1, jar2, jar3, jar4).map(j => j.toString).mkString(",")
val args = Seq(
"--class", SparkSubmitClassLoaderTest.getClass.getName.stripSuffix("$"),
@ -315,7 +315,7 @@ class HiveSparkSubmitSuite
"--master", "local-cluster[2,1,1024]",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
"--jars", TestHive.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath,
"--jars", TestHive.getHiveContribJar().getCanonicalPath,
unusedJar.toString)
runSparkSubmit(argsForCreateTable)
@ -457,7 +457,7 @@ object TemporaryHiveUDFTest extends Logging {
// Load a Hive UDF from the jar.
logInfo("Registering a temporary Hive UDF provided in a jar.")
val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
val jar = hiveContext.getHiveContribJar().getCanonicalPath
hiveContext.sql(
s"""
|CREATE TEMPORARY FUNCTION example_max
@ -495,7 +495,7 @@ object PermanentHiveUDFTest1 extends Logging {
// Load a Hive UDF from the jar.
logInfo("Registering a permanent Hive UDF provided in a jar.")
val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
val jar = hiveContext.getHiveContribJar().getCanonicalPath
hiveContext.sql(
s"""
|CREATE FUNCTION example_max
@ -532,7 +532,7 @@ object PermanentHiveUDFTest2 extends Logging {
val hiveContext = new TestHiveContext(sc)
// Load a Hive UDF from the jar.
logInfo("Write the metadata of a permanent Hive UDF into metastore.")
val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
val jar = hiveContext.getHiveContribJar().getCanonicalPath
val function = CatalogFunction(
FunctionIdentifier("example_max"),
"org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax",

View file

@ -816,7 +816,7 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
test("ADD JAR command 2") {
// this is a test case from mapjoin_addjar.q
val testJar = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").toURI
val testJar = TestHive.getHiveHcatalogCoreJar().toURI
val testData = TestHive.getHiveFile("data/files/sample.json").toURI
sql(s"ADD JAR $testJar")
sql(
@ -826,9 +826,9 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
sql("select * from src join t1 on src.key = t1.a")
sql("DROP TABLE t1")
assert(sql("list jars").
filter(_.getString(0).contains("hive-hcatalog-core-0.13.1.jar")).count() > 0)
filter(_.getString(0).contains(TestHive.HIVE_HCATALOG_CORE_JAR)).count() > 0)
assert(sql("list jar").
filter(_.getString(0).contains("hive-hcatalog-core-0.13.1.jar")).count() > 0)
filter(_.getString(0).contains(TestHive.HIVE_HCATALOG_CORE_JAR)).count() > 0)
val testJar2 = TestHive.getHiveFile("TestUDTF.jar").getCanonicalPath
sql(s"ADD JAR $testJar2")
assert(sql(s"list jar $testJar").count() == 1)

View file

@ -1105,7 +1105,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
override def run() {
// To make sure this test works, this jar should not be loaded in another place.
sql(
s"ADD JAR ${hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath()}")
s"ADD JAR ${hiveContext.getHiveContribJar().getCanonicalPath}")
try {
sql(
"""