aboutsummaryrefslogtreecommitdiff
path: root/R
diff options
context:
space:
mode:
authorShivaram Venkataraman <shivaram@cs.berkeley.edu>2016-09-22 11:52:42 -0700
committerReynold Xin <rxin@databricks.com>2016-09-22 11:52:42 -0700
commit9f24a17c59b1130d97efa7d313c06577f7344338 (patch)
tree44e7430a645ae258aeae3891979b710206b5b7d2 /R
parent17b72d31e0c59711eddeb525becb8085930eadcc (diff)
downloadspark-9f24a17c59b1130d97efa7d313c06577f7344338.tar.gz
spark-9f24a17c59b1130d97efa7d313c06577f7344338.tar.bz2
spark-9f24a17c59b1130d97efa7d313c06577f7344338.zip
Skip building R vignettes if Spark is not built
## What changes were proposed in this pull request?

When we build the docs separately we don't have the JAR files from the Spark build in the same tree. As the SparkR vignettes need to launch a SparkContext to be built, we skip building them if JAR files don't exist.

## How was this patch tested?

To test this we can run the following:
```
build/mvn -DskipTests -Psparkr clean
./R/create-docs.sh
```
You should see a line `Skipping R vignettes as Spark JARs not found` at the end.

Author: Shivaram Venkataraman <shivaram@cs.berkeley.edu>

Closes #15200 from shivaram/sparkr-vignette-skip.
Diffstat (limited to 'R')
-rwxr-xr-x  R/create-docs.sh | 25
1 file changed, 22 insertions(+), 3 deletions(-)
diff --git a/R/create-docs.sh b/R/create-docs.sh
index 0dfba22463..69ffc5f678 100755
--- a/R/create-docs.sh
+++ b/R/create-docs.sh
@@ -30,6 +30,13 @@ set -e
# Figure out where the script is
export FWDIR="$(cd "`dirname "$0"`"; pwd)"
+export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+
+# Required for setting SPARK_SCALA_VERSION
+. "${SPARK_HOME}"/bin/load-spark-env.sh
+
+echo "Using Scala $SPARK_SCALA_VERSION"
+
pushd $FWDIR
# Install the package (this will also generate the Rd files)
@@ -45,9 +52,21 @@ Rscript -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knit
popd
-# render creates SparkR vignettes
-Rscript -e 'library(rmarkdown); paths <- .libPaths(); .libPaths(c("lib", paths)); Sys.setenv(SPARK_HOME=tools::file_path_as_absolute("..")); render("pkg/vignettes/sparkr-vignettes.Rmd"); .libPaths(paths)'
+# Find Spark jars.
+if [ -f "${SPARK_HOME}/RELEASE" ]; then
+ SPARK_JARS_DIR="${SPARK_HOME}/jars"
+else
+ SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
+fi
+
+# Only create vignettes if Spark JARs exist
+if [ -d "$SPARK_JARS_DIR" ]; then
+ # render creates SparkR vignettes
+ Rscript -e 'library(rmarkdown); paths <- .libPaths(); .libPaths(c("lib", paths)); Sys.setenv(SPARK_HOME=tools::file_path_as_absolute("..")); render("pkg/vignettes/sparkr-vignettes.Rmd"); .libPaths(paths)'
-find pkg/vignettes/. -not -name '.' -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
+ find pkg/vignettes/. -not -name '.' -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
+else
+ echo "Skipping R vignettes as Spark JARs not found in $SPARK_HOME"
+fi
popd