[SPARK-28704][SQL][TEST] Add back skipped HiveExternalCatalogVersionsSuite in HiveSparkSubmitSuite at JDK9+
### What changes were proposed in this pull request? We skip the test HiveExternalCatalogVersionsSuite when testing with JAVA_9 or later because our previous version does not support JAVA_9 or later. We now add it back since we have a version that supports JAVA_9 or later. ### Why are the changes needed? To recover test coverage. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Check CI logs. Closes #30451 from AngersZhuuuu/SPARK-28704. Authored-by: angerszhu <angers.zhu@gmail.com> Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
This commit is contained in:
parent
517b810dfa
commit
d7f4b2ad50
|
@ -52,7 +52,6 @@ import org.apache.spark.util.Utils
|
||||||
@ExtendedHiveTest
|
@ExtendedHiveTest
|
||||||
class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
||||||
import HiveExternalCatalogVersionsSuite._
|
import HiveExternalCatalogVersionsSuite._
|
||||||
private val isTestAtLeastJava9 = SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)
|
|
||||||
private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
|
private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
|
||||||
private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
|
private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
|
||||||
// For local test, you can set `spark.test.cache-dir` to a static value like `/tmp/test-spark`, to
|
// For local test, you can set `spark.test.cache-dir` to a static value like `/tmp/test-spark`, to
|
||||||
|
@ -60,6 +59,11 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
||||||
private val sparkTestingDir = Option(System.getProperty(SPARK_TEST_CACHE_DIR_SYSTEM_PROPERTY))
|
private val sparkTestingDir = Option(System.getProperty(SPARK_TEST_CACHE_DIR_SYSTEM_PROPERTY))
|
||||||
.map(new File(_)).getOrElse(Utils.createTempDir(namePrefix = "test-spark"))
|
.map(new File(_)).getOrElse(Utils.createTempDir(namePrefix = "test-spark"))
|
||||||
private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
|
private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
|
||||||
|
val hiveVersion = if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
|
||||||
|
"2.3.7"
|
||||||
|
} else {
|
||||||
|
"1.2.1"
|
||||||
|
}
|
||||||
|
|
||||||
override def afterAll(): Unit = {
|
override def afterAll(): Unit = {
|
||||||
try {
|
try {
|
||||||
|
@ -149,7 +153,9 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
||||||
new String(Files.readAllBytes(contentPath), StandardCharsets.UTF_8)
|
new String(Files.readAllBytes(contentPath), StandardCharsets.UTF_8)
|
||||||
}
|
}
|
||||||
|
|
||||||
private def prepare(): Unit = {
|
override def beforeAll(): Unit = {
|
||||||
|
super.beforeAll()
|
||||||
|
|
||||||
val tempPyFile = File.createTempFile("test", ".py")
|
val tempPyFile = File.createTempFile("test", ".py")
|
||||||
// scalastyle:off line.size.limit
|
// scalastyle:off line.size.limit
|
||||||
Files.write(tempPyFile.toPath,
|
Files.write(tempPyFile.toPath,
|
||||||
|
@ -199,7 +205,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
||||||
"--master", "local[2]",
|
"--master", "local[2]",
|
||||||
"--conf", s"${UI_ENABLED.key}=false",
|
"--conf", s"${UI_ENABLED.key}=false",
|
||||||
"--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
|
"--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
|
||||||
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=1.2.1",
|
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=$hiveVersion",
|
||||||
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
|
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
|
||||||
"--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
|
"--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
|
||||||
"--conf", s"spark.sql.test.version.index=$index",
|
"--conf", s"spark.sql.test.version.index=$index",
|
||||||
|
@ -211,23 +217,14 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
|
||||||
tempPyFile.delete()
|
tempPyFile.delete()
|
||||||
}
|
}
|
||||||
|
|
||||||
override def beforeAll(): Unit = {
|
|
||||||
super.beforeAll()
|
|
||||||
if (!isTestAtLeastJava9) {
|
|
||||||
prepare()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
test("backward compatibility") {
|
test("backward compatibility") {
|
||||||
// TODO SPARK-28704 Test backward compatibility on JDK9+ once we have a version supports JDK9+
|
|
||||||
assume(!isTestAtLeastJava9)
|
|
||||||
val args = Seq(
|
val args = Seq(
|
||||||
"--class", PROCESS_TABLES.getClass.getName.stripSuffix("$"),
|
"--class", PROCESS_TABLES.getClass.getName.stripSuffix("$"),
|
||||||
"--name", "HiveExternalCatalog backward compatibility test",
|
"--name", "HiveExternalCatalog backward compatibility test",
|
||||||
"--master", "local[2]",
|
"--master", "local[2]",
|
||||||
"--conf", s"${UI_ENABLED.key}=false",
|
"--conf", s"${UI_ENABLED.key}=false",
|
||||||
"--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
|
"--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
|
||||||
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=1.2.1",
|
"--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=$hiveVersion",
|
||||||
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
|
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
|
||||||
"--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
|
"--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
|
||||||
"--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}",
|
"--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}",
|
||||||
|
@ -252,7 +249,9 @@ object PROCESS_TABLES extends QueryTest with SQLTestUtils {
|
||||||
// do not throw exception during object initialization.
|
// do not throw exception during object initialization.
|
||||||
case NonFatal(_) => Seq("3.0.1", "2.4.7") // A temporary fallback to use a specific version
|
case NonFatal(_) => Seq("3.0.1", "2.4.7") // A temporary fallback to use a specific version
|
||||||
}
|
}
|
||||||
versions.filter(v => v.startsWith("3") || !TestUtils.isPythonVersionAtLeast38())
|
versions
|
||||||
|
.filter(v => v.startsWith("3") || !TestUtils.isPythonVersionAtLeast38())
|
||||||
|
.filter(v => v.startsWith("3") || !SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
|
||||||
}
|
}
|
||||||
|
|
||||||
protected var spark: SparkSession = _
|
protected var spark: SparkSession = _
|
||||||
|
|
Loading…
Reference in a new issue