[SPARK-22047][FLAKY TEST] HiveExternalCatalogVersionsSuite
## What changes were proposed in this pull request?

This PR downloads a fresh copy of Spark for each test run, to make sure each run is completely isolated from the others.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #19265 from cloud-fan/test.
parent 94f7e046a2
commit 10f45b3c84
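Before the diff, here is a minimal standalone sketch of the isolation pattern this commit introduces: download into a temp directory created fresh for each run, then delete it afterwards. The object name, the version value, and the plain `java.nio` temp-dir helpers are illustrative stand-ins; the real suite uses Spark's `Utils.createTempDir` and `Utils.deleteRecursively`, as the diff below shows.

```scala
import java.io.File
import java.nio.file.Files
import scala.sys.process._

// Hypothetical standalone sketch (not part of the suite): download a Spark
// release into a directory created fresh for this run, then delete it.
object IsolatedDownloadSketch {
  def main(args: Array[String]): Unit = {
    // A new directory per run means repeated or concurrent runs never share state.
    val sparkTestingDir: File = Files.createTempDirectory("test-spark").toFile
    try {
      val version = "2.2.0" // illustrative version string
      val url = s"https://d3kbcqa49mib13.cloudfront.net/spark-$version-bin-hadoop2.7.tgz"
      // Seq(...).! runs the external command and returns its exit code.
      val exitCode = Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
      require(exitCode == 0, s"download of $url failed with exit code $exitCode")
    } finally {
      // Mirror of the new Utils.deleteRecursively(sparkTestingDir) in afterAll():
      // remove the downloaded files, then the directory itself.
      Option(sparkTestingDir.listFiles()).foreach(_.foreach(_.delete()))
      sparkTestingDir.delete()
    }
  }
}
```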
```diff
@@ -35,16 +35,18 @@ import org.apache.spark.util.Utils
  * expected version under this local directory, e.g. `/tmp/spark-test/spark-2.0.3`, we will skip the
  * downloading for this spark version.
  */
-@org.scalatest.Ignore
 class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
   private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
   private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
-  private val sparkTestingDir = "/tmp/spark-test"
+  // For local test, you can set `sparkTestingDir` to a static value like `/tmp/test-spark`, to
+  // avoid downloading Spark of different versions in each run.
+  private val sparkTestingDir = Utils.createTempDir(namePrefix = "test-spark")
   private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
 
   override def afterAll(): Unit = {
     Utils.deleteRecursively(wareHousePath)
     Utils.deleteRecursively(tmpDataDir)
+    Utils.deleteRecursively(sparkTestingDir)
     super.afterAll()
   }
 
@@ -53,7 +55,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
 
     val url = s"https://d3kbcqa49mib13.cloudfront.net/spark-$version-bin-hadoop2.7.tgz"
 
-    Seq("wget", url, "-q", "-P", sparkTestingDir).!
+    Seq("wget", url, "-q", "-P", sparkTestingDir.getCanonicalPath).!
 
     val downloaded = new File(sparkTestingDir, s"spark-$version-bin-hadoop2.7.tgz").getCanonicalPath
     val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath
```
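A note on why the second hunk adds `.getCanonicalPath`: `sparkTestingDir` is now a `java.io.File` rather than a `String`, and every argument passed to `scala.sys.process` must be a string, so the `File` has to be converted explicitly. A small sketch of the type difference, assuming a made-up path and URL:

```scala
import java.io.File
import scala.sys.process._

val dir: File = new File("/tmp/example-test-spark") // hypothetical path, for illustration only

// Before this commit, sparkTestingDir was already a String and could be passed as-is:
//   Seq("wget", url, "-q", "-P", sparkTestingDir).!
// Now it is a File, so the command would not type-check without an explicit conversion:
val args: Seq[String] = Seq("wget", "https://example.invalid/spark.tgz", "-q", "-P", dir.getCanonicalPath)
// args.!   // would execute the download; .! returns the process exit code
```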