[SPARK-8603][SPARKR] Use shell() instead of system2() for SparkR on Windows
## What changes were proposed in this pull request?

This PR corrects SparkR to use `shell()` instead of `system2()` on Windows. `system2(...)` on Windows does not process the Windows file separator `\`, whereas `shell(translate = TRUE, ...)` handles it, so the call is now chosen according to the OS. Existing tests failed on Windows because of this; for example:

```
8. Failure: sparkJars tag in SparkContext (test_includeJAR.R#34)
9. Failure: sparkJars tag in SparkContext (test_includeJAR.R#36)
```

The failures above were caused by the use of `system2`.

In addition, this PR fixes other tests that failed on Windows:

```
5. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#128)
6. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#131)
7. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#134)
```

These were caused by platform-dependent behaviour of `normalizePath()`: if the path does not exist, Linux returns the input unchanged, while Windows prepends the current working directory.

```r
# On Linux
path <- normalizePath("aa")
print(path)
[1] "aa"

# On Windows
path <- normalizePath("aa")
print(path)
[1] "C:\\Users\\aa"
```

## How was this patch tested?

Jenkins tests, and manually tested on a Windows machine. Here is the [stdout](https://gist.github.com/HyukjinKwon/4bf35184f3a30f3bce987a58ec2bbbab) of the testing.

Closes #7025

Author: hyukjinkwon <gurwls223@gmail.com>
Author: Hyukjin Kwon <gurwls223@gmail.com>
Author: Prakash PC <prakash.chinnu@gmail.com>

Closes #13165 from HyukjinKwon/pr/7025.
Parent: 3fca635b4e · Commit: 1c403733b8
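As a minimal illustration of the mechanism (not part of the patch; hypothetical Windows session), `shell(translate = TRUE, ...)` rewrites separators in the command string before execution, which `system2()` never does:

```r
# Hypothetical Windows R session. With translate = TRUE, shell() rewrites
# "/" to "\" in the command string before handing it to cmd.exe, so paths
# built with file.path() reach the shell with native separators.
out <- shell("ECHO a/b/c", translate = TRUE, intern = TRUE)
print(out)
# [1] "a\\b\\c"

# system2() performs no such translation; the string is passed through as-is.
```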
R/WINDOWS.md:

````diff
@@ -28,6 +28,6 @@ To run the SparkR unit tests on Windows, the following steps are required —ass
 
 ```
 R -e "install.packages('testthat', repos='http://cran.us.r-project.org')"
-.\bin\spark-submit2.cmd --conf spark.hadoop.fs.defualt.name="file:///" R\pkg\tests\run-all.R
+.\bin\spark-submit2.cmd --conf spark.hadoop.fs.default.name="file:///" R\pkg\tests\run-all.R
 ```
 
````
R/pkg/R/client.R:

```diff
@@ -38,7 +38,7 @@ determineSparkSubmitBin <- function() {
   if (.Platform$OS.type == "unix") {
     sparkSubmitBinName <- "spark-submit"
   } else {
-    sparkSubmitBinName <- "spark-submit.cmd"
+    sparkSubmitBinName <- "spark-submit2.cmd"
   }
   sparkSubmitBinName
 }
@@ -69,5 +69,5 @@ launchBackend <- function(args, sparkHome, jars, sparkSubmitOpts, packages) {
   }
   combinedArgs <- generateSparkSubmitArgs(args, sparkHome, jars, sparkSubmitOpts, packages)
   cat("Launching java with spark-submit command", sparkSubmitBin, combinedArgs, "\n")
-  invisible(system2(sparkSubmitBin, combinedArgs, wait = F))
+  invisible(launchScript(sparkSubmitBin, combinedArgs))
 }
```
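A likely rationale for the `spark-submit2.cmd` switch (an inference, not stated in the patch): `bin/spark-submit.cmd` delegates to `spark-submit2.cmd` through a nested `cmd` invocation, so calling the inner script directly avoids stacking another shell on top of the `shell()` call. Note that the `R/WINDOWS.md` snippet above already invokes `spark-submit2.cmd` directly.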
R/pkg/R/utils.R:

```diff
@@ -664,3 +664,12 @@ varargsToJProperties <- function(...) {
   }
   props
 }
+
+launchScript <- function(script, combinedArgs, capture = FALSE) {
+  if (.Platform$OS.type == "windows") {
+    scriptWithArgs <- paste(script, combinedArgs, sep = " ")
+    shell(scriptWithArgs, translate = TRUE, wait = capture, intern = capture) # nolint
+  } else {
+    system2(script, combinedArgs, wait = capture, stdout = capture)
+  }
+}
```
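A hedged usage sketch of the new helper's `capture` flag (assuming the function above is loaded; not part of the patch):

```r
# capture = TRUE blocks and returns stdout as a character vector
# (intern = TRUE under shell() on Windows, stdout = TRUE under system2()
# elsewhere); the default capture = FALSE launches without waiting,
# preserving the old system2(..., wait = F) behaviour in launchBackend().
res <- launchScript("ECHO", "done", capture = TRUE)   # on Windows: c("done")
launchScript("echo", "done")                          # non-blocking launch
```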
R/pkg/inst/tests/testthat/test_Windows.R (new file, 26 lines):

```diff
@@ -0,0 +1,26 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+context("Windows-specific tests")
+
+test_that("sparkJars tag in SparkContext", {
+  if (.Platform$OS.type != "windows") {
+    skip("This test is only for Windows, skipped")
+  }
+  testOutput <- launchScript("ECHO", "a/b/c", capture = TRUE)
+  abcPath <- testOutput[1]
+  expect_equal(abcPath, "a\\b\\c")
+})
```
R/pkg/inst/tests/testthat/test_context.R:

```diff
@@ -129,13 +129,13 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whiteli
 test_that("sparkJars sparkPackages as comma-separated strings", {
   expect_warning(processSparkJars(" a, b "))
   jars <- suppressWarnings(processSparkJars(" a, b "))
-  expect_equal(jars, c("a", "b"))
+  expect_equal(lapply(jars, basename), list("a", "b"))
 
   jars <- suppressWarnings(processSparkJars(" abc ,, def "))
-  expect_equal(jars, c("abc", "def"))
+  expect_equal(lapply(jars, basename), list("abc", "def"))
 
   jars <- suppressWarnings(processSparkJars(c(" abc ,, def ", "", "xyz", " ", "a,b")))
-  expect_equal(jars, c("abc", "def", "xyz", "a", "b"))
+  expect_equal(lapply(jars, basename), list("abc", "def", "xyz", "a", "b"))
 
   p <- processSparkPackages(c("ghi", "lmn"))
   expect_equal(p, c("ghi", "lmn"))
```
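Why the tests now compare `basename()`s: a sketch under the assumption of an arbitrary Windows working directory (the directory shown is made up):

```r
# processSparkJars() runs each entry through normalizePath(); for nonexistent
# paths Linux returns the input unchanged while Windows prepends the current
# working directory, e.g. "C:\\Users\\me\\a" instead of "a".
jars <- suppressWarnings(processSparkJars(" a, b "))
# Stripping to basenames makes the expectation hold on both platforms:
expect_equal(lapply(jars, basename), list("a", "b"))
```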
|
@ -21,10 +21,9 @@ runScript <- function() {
|
||||||
sparkTestJarPath <- "R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar"
|
sparkTestJarPath <- "R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar"
|
||||||
jarPath <- paste("--jars", shQuote(file.path(sparkHome, sparkTestJarPath)))
|
jarPath <- paste("--jars", shQuote(file.path(sparkHome, sparkTestJarPath)))
|
||||||
scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/testthat/jarTest.R")
|
scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/testthat/jarTest.R")
|
||||||
submitPath <- file.path(sparkHome, "bin/spark-submit")
|
submitPath <- file.path(sparkHome, paste("bin/", determineSparkSubmitBin(), sep = ""))
|
||||||
res <- system2(command = submitPath,
|
combinedArgs <- paste(jarPath, scriptPath, sep = " ")
|
||||||
args = c(jarPath, scriptPath),
|
res <- launchScript(submitPath, combinedArgs, capture = TRUE)
|
||||||
stdout = TRUE)
|
|
||||||
tail(res, 2)
|
tail(res, 2)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|