[SPARK-19990][SQL][TEST-MAVEN] create a temp file for a file in test.jar's resources when running mvn test across different modules
## What changes were proposed in this pull request? After we merged `HiveDDLSuite` and `DDLSuite` in [SPARK-19235](https://issues.apache.org/jira/browse/SPARK-19235), we have two subclasses of `DDLSuite`: `HiveCatalogedDDLSuite` and `InMemoryCatalogDDLSuite`. `DDLSuite` is in the `sql/core` module, while `HiveCatalogedDDLSuite` is in the `sql/hive` module. If we run `mvn test` on `HiveCatalogedDDLSuite`, it also runs the tests in its parent class `DDLSuite`, and this causes some test cases to fail because they get and use a test file path from the `sql/core` module's resources. The path obtained starts with 'jar:', e.g. "jar:file:/home/jenkins/workspace/spark-master-test-maven-hadoop-2.6/sql/core/target/spark-sql_2.11-2.2.0-SNAPSHOT-tests.jar!/test-data/cars.csv", which fails when passed to `new Path()` in datasource.scala. This PR fixes this by copying the file from the resource to a temp directory. ## How was this patch tested? N/A Author: windpiger <songjun@outlook.com> Closes #17338 from windpiger/fixtestfailemvn.
This commit is contained in:
parent
8163911594
commit
7ce30e00b2
|
@ -25,7 +25,7 @@ import org.scalatest.BeforeAndAfterEach
|
|||
|
||||
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
|
||||
import org.apache.spark.sql.catalyst.TableIdentifier
|
||||
import org.apache.spark.sql.catalyst.analysis.{DatabaseAlreadyExistsException, FunctionRegistry, NoSuchPartitionException, NoSuchTableException, TempTableAlreadyExistsException}
|
||||
import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchPartitionException, NoSuchTableException, TempTableAlreadyExistsException}
|
||||
import org.apache.spark.sql.catalyst.catalog._
|
||||
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
|
@ -699,21 +699,28 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
|
|||
}
|
||||
|
||||
test("create temporary view using") {
  // When running HiveCatalogedDDLSuite (the sql/hive subclass of this suite), resolving
  // "test-data/cars.csv" via the context class loader yields a path starting with
  // 'jar:file:/...', which is not a legal Hadoop Path. withResourceTempPath copies the
  // resource out of the jar into a plain temp file so the CSV data source can read it.
  withResourceTempPath("test-data/cars.csv") { tmpFile =>
    withView("testview") {
      sql(s"CREATE OR REPLACE TEMPORARY VIEW testview (c1 String, c2 String) USING " +
        "org.apache.spark.sql.execution.datasources.csv.CSVFileFormat " +
        s"OPTIONS (PATH '$tmpFile')")

      checkAnswer(
        sql("select c1, c2 from testview order by c1 limit 1"),
        Row("1997", "Ford") :: Nil)

      // Fails if creating a new view with the same name
      intercept[TempTableAlreadyExistsException] {
        sql(
          s"""
             |CREATE TEMPORARY VIEW testview
             |USING org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
             |OPTIONS (PATH '$tmpFile')
           """.stripMargin)
      }
    }
  }
}
|
||||
|
|
|
@ -19,10 +19,10 @@ package org.apache.spark.sql.test
|
|||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.util.UUID
|
||||
|
||||
import scala.language.implicitConversions
|
||||
import scala.util.Try
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
import org.apache.hadoop.fs.Path
|
||||
|
@ -123,6 +123,21 @@ private[sql] trait SQLTestUtils
|
|||
try f(path) finally Utils.deleteRecursively(path)
|
||||
}
|
||||
|
||||
/**
 * Copies a file bundled in the jar's resources to a temp file, then passes that temp
 * file to `f`. This lets `f` work with a plain filesystem path (e.g. file:/) instead of
 * a resource path starting with 'jar:file:/', which is not a legal Hadoop Path.
 *
 * @param resourcePath classpath-relative path of the resource to copy
 * @param f callback receiving the temporary copy; the containing temp dir is deleted
 *          after `f` returns (via `withTempDir`)
 */
protected def withResourceTempPath(resourcePath: String)(f: File => Unit): Unit = {
  val inputStream =
    Thread.currentThread().getContextClassLoader.getResourceAsStream(resourcePath)
  // getResourceAsStream returns null when the resource is missing; fail fast with a
  // clear message instead of an opaque NullPointerException inside Files.copy.
  require(inputStream != null, s"Resource not found on classpath: $resourcePath")
  try {
    withTempDir { dir =>
      val tmpFile = new File(dir, "tmp")
      Files.copy(inputStream, tmpFile.toPath)
      f(tmpFile)
    }
  } finally {
    // Always release the stream, even if the copy or `f` throws.
    inputStream.close()
  }
}
|
||||
|
||||
/**
|
||||
* Creates a temporary directory, which is then passed to `f` and will be deleted after `f`
|
||||
* returns.
|
||||
|
|
Loading…
Reference in a new issue