[SPARK-25572][SPARKR] test only if not cran
## What changes were proposed in this pull request?

CRAN doesn't seem to respect the system requirements when running tests - we have seen cases where SparkR is run on Java 10, which unfortunately Spark does not start on. For 2.4, let's attempt skipping all tests.

## How was this patch tested?

manual, jenkins, appveyor

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #22589 from felixcheung/ralltests.
parent f246813afb
commit f4b138082f
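The fix leans on the usual `NOT_CRAN` convention: devtools and most CI setups export `NOT_CRAN=true`, while CRAN's check machines leave it unset. A minimal sketch of the gate the patch wraps around the whole test runner (the branch comments are illustrative, not part of the patch):

```r
# NOT_CRAN gate as applied by this patch. Sys.getenv() returns "" for an
# unset variable, so identical() is a safe comparison on CRAN as well.
if (identical(Sys.getenv("NOT_CRAN"), "true")) {
  # Jenkins / AppVeyor / local dev: run the full SparkR test suite
} else {
  # CRAN check machine (possibly Java 10, which Spark 2.x cannot start on):
  # run nothing at all
}
```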
R/pkg/tests/run-all.R

```diff
@@ -18,50 +18,55 @@
 library(testthat)
 library(SparkR)
 
-# Turn all warnings into errors
-options("warn" = 2)
-
-if (.Platform$OS.type == "windows") {
-  Sys.setenv(TZ = "GMT")
-}
-
-# Setup global test environment
-# Install Spark first to set SPARK_HOME
-
-# NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
-# CRAN machines. For Jenkins we should already have SPARK_HOME set.
-install.spark(overwrite = TRUE)
-
-sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
-sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
-invisible(lapply(sparkRWhitelistSQLDirs,
-                 function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
-sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
-
-sparkRTestMaster <- "local[1]"
-sparkRTestConfig <- list()
+# SPARK-25572
 if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  sparkRTestMaster <- ""
-} else {
-  # Disable hsperfdata on CRAN
-  old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
-  Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
-  tmpDir <- tempdir()
-  tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
-  sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
-                           spark.executor.extraJavaOptions = tmpArg)
+
+  # Turn all warnings into errors
+  options("warn" = 2)
+
+  if (.Platform$OS.type == "windows") {
+    Sys.setenv(TZ = "GMT")
+  }
+
+  # Setup global test environment
+  # Install Spark first to set SPARK_HOME
+
+  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
+  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
+  install.spark(overwrite = TRUE)
+
+  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
+  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
+  invisible(lapply(sparkRWhitelistSQLDirs,
+                   function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
+  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
+
+  sparkRTestMaster <- "local[1]"
+  sparkRTestConfig <- list()
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    sparkRTestMaster <- ""
+  } else {
+    # Disable hsperfdata on CRAN
+    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
+    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
+    tmpDir <- tempdir()
+    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
+    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
+                             spark.executor.extraJavaOptions = tmpArg)
+  }
+
+  test_package("SparkR")
+
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    # set random seed for predictable results. mostly for base's sample() in tree and classification
+    set.seed(42)
+    # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
+    testthat:::run_tests("SparkR",
+                         file.path(sparkRDir, "pkg", "tests", "fulltests"),
+                         NULL,
+                         "summary")
+  }
+
+  SparkR:::uninstallDownloadedSpark()
+
 }
-
-test_package("SparkR")
-
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  # set random seed for predictable results. mostly for base's sample() in tree and classification
-  set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
-}
-
-SparkR:::uninstallDownloadedSpark()
```
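For context on how the globals defined in `run-all.R` get used: each test file under `tests/fulltests` opens its session with `sparkRTestMaster` and `sparkRTestConfig`. A rough sketch of that consumption, assuming a typical test file (the data frame assertion is illustrative only, not an actual SparkR test):

```r
# Sketch of a test file consuming the globals set by run-all.R.
# sparkR.session() is the real SparkR API; the assertion body is a stand-in.
library(SparkR)

sparkSession <- sparkR.session(
  master = sparkRTestMaster,        # "" outside CRAN: let spark-submit decide
  sparkConfig = sparkRTestConfig)   # CRAN-only JVM opts: tmpdir, -XX:-UsePerfData

df <- createDataFrame(data.frame(x = 1:3))
stopifnot(count(df) == 3)

sparkR.session.stop()
```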