author     Felix Cheung <felixcheung_m@hotmail.com>          2017-02-14 13:51:27 -0800
committer  Shivaram Venkataraman <shivaram@cs.berkeley.edu>  2017-02-14 13:51:27 -0800
commit     a3626ca333e6e1881e2f09ccae0fa8fa7243223e (patch)
tree       755256c152a2e6340f1ad3578bfaba8518e4a877 /R
parent     ab9872db1f9c0f289541ec5756d1a142d85545ce (diff)
[SPARK-19387][SPARKR] Tests do not run with SparkR source package in CRAN check
## What changes were proposed in this pull request?

- This is caused by the changes in SPARK-18444 and SPARK-18643: we no longer install Spark when `master = ""` (the default). It is also related to SPARK-18449, since the real `master` value is not known at the time the R code in `sparkR.session` is run. (`master` cannot default to "local" since it could be overridden by the spark-submit command line or the Spark config.)
- As a result, running SparkR as a package in an IDE works fine, but the CRAN check does not, since it launches SparkR via a non-interactive script.
- The fix is to add a check to the beginning of each test and vignette. The same would also work by changing `sparkR.session()` to `sparkR.session(master = "local")` in the tests, but I think being more explicit is better.

## How was this patch tested?

Tested this by reverting the version to 2.1, since it needs to download the release jar with a matching version. But since there are changes in 2.2 (specifically around SparkR ML) that are incompatible with 2.1, some tests fail in this configuration. This will need to be ported to branch-2.1 and retested with the 2.1 release jar.

Manually, as:

```
# modify DESCRIPTION to revert version to 2.1.0
SPARK_HOME=/usr/spark R CMD build pkg
# run cran check without SPARK_HOME
R CMD check --as-cran SparkR_2.1.0.tar.gz
```

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #16720 from felixcheung/rcranchecktest.
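For comparison, a minimal sketch of the alternative mentioned above, which was considered but not adopted (pinning the master in the tests instead of installing Spark up front):

```
# Alternative fix described in the PR text but not taken: force local mode
# explicitly so the session follows the local-master install path.
library(SparkR)
sparkR.session(master = "local")   # instead of the default sparkR.session()
```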
Diffstat (limited to 'R')
-rw-r--r--  R/pkg/R/install.R                      16
-rw-r--r--  R/pkg/R/sparkR.R                        6
-rw-r--r--  R/pkg/tests/run-all.R                   3
-rw-r--r--  R/pkg/vignettes/sparkr-vignettes.Rmd    3
4 files changed, 21 insertions, 7 deletions
diff --git a/R/pkg/R/install.R b/R/pkg/R/install.R
index 72386e68de..4ca7aa664e 100644
--- a/R/pkg/R/install.R
+++ b/R/pkg/R/install.R
@@ -21,9 +21,9 @@
#' Download and Install Apache Spark to a Local Directory
#'
#' \code{install.spark} downloads and installs Spark to a local directory if
-#' it is not found. The Spark version we use is the same as the SparkR version.
-#' Users can specify a desired Hadoop version, the remote mirror site, and
-#' the directory where the package is installed locally.
+#' it is not found. If SPARK_HOME is set in the environment, and that directory is found, that is
+#' returned. The Spark version we use is the same as the SparkR version. Users can specify a desired
+#' Hadoop version, the remote mirror site, and the directory where the package is installed locally.
#'
#' The full url of remote file is inferred from \code{mirrorUrl} and \code{hadoopVersion}.
#' \code{mirrorUrl} specifies the remote path to a Spark folder. It is followed by a subfolder
@@ -68,6 +68,16 @@
#' \href{http://spark.apache.org/downloads.html}{Apache Spark}
install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
localDir = NULL, overwrite = FALSE) {
+ sparkHome <- Sys.getenv("SPARK_HOME")
+ if (isSparkRShell()) {
+ stopifnot(nchar(sparkHome) > 0)
+ message("Spark is already running in sparkR shell.")
+ return(invisible(sparkHome))
+ } else if (!is.na(file.info(sparkHome)$isdir)) {
+ message("Spark package found in SPARK_HOME: ", sparkHome)
+ return(invisible(sparkHome))
+ }
+
version <- paste0("spark-", packageVersion("SparkR"))
hadoopVersion <- tolower(hadoopVersion)
hadoopVersionName <- hadoopVersionName(hadoopVersion)
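As a hedged illustration of the documented behavior above (the path and local directory below are placeholders, not values from this patch):

```
library(SparkR)

# If SPARK_HOME points at an existing Spark directory, install.spark()
# now returns that directory (invisibly) without downloading anything.
Sys.setenv(SPARK_HOME = "/opt/spark")          # placeholder path
install.spark()

# Without a usable SPARK_HOME, it downloads a release matching the SparkR
# version; hadoopVersion, mirrorUrl and localDir are optional overrides.
Sys.unsetenv("SPARK_HOME")
install.spark(hadoopVersion = "2.7", localDir = "~/spark-cache")
```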
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 870e76b729..61773ed3ee 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -588,13 +588,11 @@ processSparkPackages <- function(packages) {
sparkCheckInstall <- function(sparkHome, master, deployMode) {
if (!isSparkRShell()) {
if (!is.na(file.info(sparkHome)$isdir)) {
- msg <- paste0("Spark package found in SPARK_HOME: ", sparkHome)
- message(msg)
+ message("Spark package found in SPARK_HOME: ", sparkHome)
NULL
} else {
if (interactive() || isMasterLocal(master)) {
- msg <- paste0("Spark not found in SPARK_HOME: ", sparkHome)
- message(msg)
+ message("Spark not found in SPARK_HOME: ", sparkHome)
packageLocalDir <- install.spark()
packageLocalDir
} else if (isClientMode(master) || deployMode == "client") {
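Pulling the changed branches together, a simplified sketch of the resulting control flow in `sparkCheckInstall` (only the branches visible in the hunk above; the client-mode branch is truncated there and left as a comment here):

```
sparkCheckInstall <- function(sparkHome, master, deployMode) {
  if (!isSparkRShell()) {
    if (!is.na(file.info(sparkHome)$isdir)) {
      # SPARK_HOME points at an existing directory: use it as-is
      message("Spark package found in SPARK_HOME: ", sparkHome)
      NULL
    } else if (interactive() || isMasterLocal(master)) {
      # Interactive session or local master: install on demand
      message("Spark not found in SPARK_HOME: ", sparkHome)
      install.spark()
    } else {
      # client-mode and remaining branches elided in the hunk above
      NULL
    }
  }
}
```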
diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 1d04656ac2..ab8d1ca019 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -21,4 +21,7 @@ library(SparkR)
# Turn all warnings into errors
options("warn" = 2)
+# Setup global test environment
+install.spark()
+
test_package("SparkR")
diff --git a/R/pkg/vignettes/sparkr-vignettes.Rmd b/R/pkg/vignettes/sparkr-vignettes.Rmd
index f13e0b3a18..a742484c4c 100644
--- a/R/pkg/vignettes/sparkr-vignettes.Rmd
+++ b/R/pkg/vignettes/sparkr-vignettes.Rmd
@@ -44,6 +44,9 @@ library(SparkR)
We use default settings in which it runs in local mode. It auto downloads Spark package in the background if no previous installation is found. For more details about setup, see [Spark Session](#SetupSparkSession).
+```{r, include=FALSE}
+install.spark()
+```
```{r, message=FALSE, results="hide"}
sparkR.session()
```