diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index e4a11a5f78..924ebbf37e 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -655,6 +655,40 @@ sparkCheckInstall <- function(sparkHome, master, deployMode) {
   } else {
     if (interactive() || isMasterLocal(master)) {
       message("Spark not found in SPARK_HOME: ", sparkHome)
+      # If EXISTING_SPARKR_BACKEND_PORT environment variable is set, assume
+      # that we're in Spark submit. spark-submit always sets Spark home
+      # so this case should not happen. This is just a safeguard.
+      isSparkRSubmit <- Sys.getenv("EXISTING_SPARKR_BACKEND_PORT", "") != ""
+
+      # SPARKR_ASK_INSTALLATION is an internal environment variable in case
+      # users want to disable this behavior. This environment variable should
+      # be removed if no user complains. This environment variable was added
+      # in case other notebook projects are affected.
+      if (!isSparkRSubmit && Sys.getenv("SPARKR_ASK_INSTALLATION", "TRUE") == "TRUE") {
+        # Finally, we're either plain R shell or Rscript.
+        msg <- paste0(
+          "Will you download and install (or reuse if it exists) Spark package ",
+          "under the cache [", sparkCachePath(), "]? (y/n): ")
+
+        answer <- NA
+        while (is.na(answer) || (answer != "y" && answer != "n")) {
+          # Dispatch on R shell in case readLines does not work in RStudio
+          # See https://stackoverflow.com/questions/30191232/use-stdin-from-within-r-studio
+          if (interactive()) {
+            answer <- readline(prompt = msg)
+          } else {
+            cat(msg)
+            answer <- readLines("stdin", n = 1)
+          }
+        }
+        if (answer == "n") {
+          stop(paste0(
+            "Please make sure Spark package is installed in this machine.\n",
+            " - If there is one, set the path in sparkHome parameter or ",
+            "environment variable SPARK_HOME.\n",
+            " - If not, you may run install.spark function to do the job."))
+        }
+      }
       packageLocalDir <- install.spark()
       packageLocalDir
     } else if (isClientMode(master) || deployMode == "client") {
diff --git a/docs/sparkr-migration-guide.md b/docs/sparkr-migration-guide.md
index 32836cdac5..3a937b729a 100644
--- a/docs/sparkr-migration-guide.md
+++ b/docs/sparkr-migration-guide.md
@@ -26,6 +26,10 @@
 Note that this migration guide describes the items specific to SparkR.
 Many items of SQL migration can be applied when migrating SparkR to higher versions.
 Please refer [Migration Guide: SQL, Datasets and DataFrame](sql-migration-guide.html).
+## Upgrading from SparkR 3.1 to 3.2
+
+ - Previously, SparkR automatically downloaded and installed the Spark distribution in the user's cache directory to complete SparkR installation when SparkR runs in a plain R shell or Rscript, and the Spark distribution cannot be found. Now, it asks whether users want to download and install it. To restore the previous behavior, set the `SPARKR_ASK_INSTALLATION` environment variable to `FALSE`.
+
 ## Upgrading from SparkR 2.4 to 3.0
 
 - The deprecated methods `parquetFile`, `saveAsParquetFile`, `jsonFile`, `jsonRDD` have been removed. Use `read.parquet`, `write.parquet`, `read.json` instead.