diff options
Diffstat (limited to 'R/pkg/R')
-rw-r--r--  R/pkg/R/install.R | 16 +++++++++++++---
-rw-r--r--  R/pkg/R/sparkR.R  |  6 ++----
2 files changed, 15 insertions(+), 7 deletions(-)
diff --git a/R/pkg/R/install.R b/R/pkg/R/install.R
index 72386e68de..4ca7aa664e 100644
--- a/R/pkg/R/install.R
+++ b/R/pkg/R/install.R
@@ -21,9 +21,9 @@
 #' Download and Install Apache Spark to a Local Directory
 #'
 #' \code{install.spark} downloads and installs Spark to a local directory if
-#' it is not found. The Spark version we use is the same as the SparkR version.
-#' Users can specify a desired Hadoop version, the remote mirror site, and
-#' the directory where the package is installed locally.
+#' it is not found. If SPARK_HOME is set in the environment, and that directory is found, that is
+#' returned. The Spark version we use is the same as the SparkR version. Users can specify a desired
+#' Hadoop version, the remote mirror site, and the directory where the package is installed locally.
 #'
 #' The full url of remote file is inferred from \code{mirrorUrl} and \code{hadoopVersion}.
 #' \code{mirrorUrl} specifies the remote path to a Spark folder. It is followed by a subfolder
@@ -68,6 +68,16 @@
 #' \href{http://spark.apache.org/downloads.html}{Apache Spark}
 install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
                           localDir = NULL, overwrite = FALSE) {
+  sparkHome <- Sys.getenv("SPARK_HOME")
+  if (isSparkRShell()) {
+    stopifnot(nchar(sparkHome) > 0)
+    message("Spark is already running in sparkR shell.")
+    return(invisible(sparkHome))
+  } else if (!is.na(file.info(sparkHome)$isdir)) {
+    message("Spark package found in SPARK_HOME: ", sparkHome)
+    return(invisible(sparkHome))
+  }
+
   version <- paste0("spark-", packageVersion("SparkR"))
   hadoopVersion <- tolower(hadoopVersion)
   hadoopVersionName <- hadoopVersionName(hadoopVersion)
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 870e76b729..61773ed3ee 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -588,13 +588,11 @@ processSparkPackages <- function(packages) {
 sparkCheckInstall <- function(sparkHome, master, deployMode) {
   if (!isSparkRShell()) {
     if (!is.na(file.info(sparkHome)$isdir)) {
-      msg <- paste0("Spark package found in SPARK_HOME: ", sparkHome)
-      message(msg)
+      message("Spark package found in SPARK_HOME: ", sparkHome)
       NULL
     } else {
       if (interactive() || isMasterLocal(master)) {
-        msg <- paste0("Spark not found in SPARK_HOME: ", sparkHome)
-        message(msg)
+        message("Spark not found in SPARK_HOME: ", sparkHome)
         packageLocalDir <- install.spark()
         packageLocalDir
       } else if (isClientMode(master) || deployMode == "client") {