author     Shivaram Venkataraman <shivaram@cs.berkeley.edu>  2016-07-19 19:28:08 -0700
committer  Shivaram Venkataraman <shivaram@cs.berkeley.edu>  2016-07-19 19:28:08 -0700
commit     fc23263623d5dcd1167fa93c094fe41ace77c326 (patch)
tree       57ce42fdbaee938b7079ad438ca6513846a73b1a
parent     9674af6f6f81066139ea675de724f951bd0d49c9 (diff)
[SPARK-10683][SPARK-16510][SPARKR] Move SparkR include jar test to SparkSubmitSuite
## What changes were proposed in this pull request?

This change moves the include jar test from R to SparkSubmitSuite and uses a dynamically compiled jar. This helps us remove the binary jar from the R package and solves both the CRAN warnings and the lack of source being available for this jar.

## How was this patch tested?

SparkR unit tests, SparkSubmitSuite, check-cran.sh

Author: Shivaram Venkataraman <shivaram@cs.berkeley.edu>

Closes #14243 from shivaram/sparkr-jar-move.
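The heart of the change is replacing the checked-in binary jar with one compiled on the fly inside SparkSubmitSuite. A condensed sketch of that flow, mirroring the new test in the diff below (illustrative, not the committed code verbatim):

    import java.io.File
    import org.apache.spark.TestUtils
    import org.apache.spark.TestUtils.JavaSourceFromString
    import org.apache.spark.util.Utils

    // Write a tiny Java class under a temp package directory, compile it,
    // and package the class file into a jar that spark-submit can ship.
    val tempDir = Utils.createTempDir()
    val srcDir = new File(tempDir, "sparkrtest")
    srcDir.mkdirs()
    val source = new JavaSourceFromString(
      new File(srcDir, "DummyClass").getAbsolutePath,
      """package sparkrtest;
        |public class DummyClass implements java.io.Serializable {
        |  public static String helloWorld(String arg) { return "Hello " + arg; }
        |}""".stripMargin)
    val classFile = TestUtils.createCompiledClass("DummyClass", srcDir, source, Seq.empty)
    val jarFile = new File(tempDir, "sparkRTestJar-" + System.currentTimeMillis() + ".jar")
    val jarURL = TestUtils.createJar(Seq(classFile), jarFile, directoryPrefix = Some("sparkrtest"))

Because the jar is built from source at test time, its source is always available and no binary artifact has to live in the R package, which is what the CRAN warnings were about.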
-rw-r--r--  R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar                    bin 2886 -> 0 bytes
-rw-r--r--  R/pkg/inst/tests/testthat/jarTest.R                                  10
-rw-r--r--  R/pkg/inst/tests/testthat/test_includeJAR.R                          36
-rw-r--r--  core/src/main/scala/org/apache/spark/api/r/RUtils.scala              9
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala   38
5 files changed, 52 insertions(+), 41 deletions(-)
diff --git a/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar b/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar
deleted file mode 100644
index 1d5c2af631..0000000000
--- a/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar
+++ /dev/null
Binary files differ
diff --git a/R/pkg/inst/tests/testthat/jarTest.R b/R/pkg/inst/tests/testthat/jarTest.R
index 51754a4650..c9615c8d4f 100644
--- a/R/pkg/inst/tests/testthat/jarTest.R
+++ b/R/pkg/inst/tests/testthat/jarTest.R
@@ -16,17 +16,17 @@
 #
 library(SparkR)
 
-sparkR.session()
+sc <- sparkR.session()
 
-helloTest <- SparkR:::callJStatic("sparkR.test.hello",
+helloTest <- SparkR:::callJStatic("sparkrtest.DummyClass",
                                   "helloWorld",
                                   "Dave")
+stopifnot(identical(helloTest, "Hello Dave"))
 
-basicFunction <- SparkR:::callJStatic("sparkR.test.basicFunction",
+basicFunction <- SparkR:::callJStatic("sparkrtest.DummyClass",
                                       "addStuff",
                                       2L,
                                       2L)
+stopifnot(basicFunction == 4L)
 
 sparkR.session.stop()
-output <- c(helloTest, basicFunction)
-writeLines(output)
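Note the design shift in jarTest.R: the expected values are now checked inside the script with stopifnot, so any mismatch makes Rscript, and therefore spark-submit, exit non-zero. The harness only has to watch the exit code instead of capturing and parsing the last lines of output, as the removed test_includeJAR.R (next diff) did. A generic sketch of that exit-code pattern (hypothetical paths; this is not the suite's actual runSparkSubmit):

    import scala.sys.process._

    // Launch spark-submit as a child process; a failed stopifnot in the
    // R script surfaces here as a non-zero exit code.
    val cmd = Seq("/path/to/spark/bin/spark-submit", "--master", "local",
      "--jars", "/tmp/sparkRTestJar.jar", "jarTest.R")
    val exitCode = cmd.!
    assert(exitCode == 0, s"spark-submit exited with code $exitCode")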
diff --git a/R/pkg/inst/tests/testthat/test_includeJAR.R b/R/pkg/inst/tests/testthat/test_includeJAR.R
deleted file mode 100644
index 512dd39cb2..0000000000
--- a/R/pkg/inst/tests/testthat/test_includeJAR.R
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-context("include an external JAR in SparkContext")
-
-runScript <- function() {
-  sparkHome <- Sys.getenv("SPARK_HOME")
-  sparkTestJarPath <- "R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar"
-  jarPath <- paste("--jars", shQuote(file.path(sparkHome, sparkTestJarPath)))
-  scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/testthat/jarTest.R")
-  submitPath <- file.path(sparkHome, paste("bin/", determineSparkSubmitBin(), sep = ""))
-  combinedArgs <- paste(jarPath, scriptPath, sep = " ")
-  res <- launchScript(submitPath, combinedArgs, capture = TRUE)
-  tail(res, 2)
-}
-
-test_that("sparkJars tag in SparkContext", {
-  testOutput <- runScript()
-  helloTest <- testOutput[1]
-  expect_equal(helloTest, "Hello, Dave")
-  basicFunction <- testOutput[2]
-  expect_equal(basicFunction, "4")
-})
diff --git a/core/src/main/scala/org/apache/spark/api/r/RUtils.scala b/core/src/main/scala/org/apache/spark/api/r/RUtils.scala
index 16157414fd..77825e75e5 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RUtils.scala
@@ -38,6 +38,15 @@ private[spark] object RUtils {
   }
 
   /**
+   * Check if SparkR is installed before running tests that use SparkR.
+   */
+  def isSparkRInstalled: Boolean = {
+    localSparkRPackagePath.filter { pkgDir =>
+      new File(Seq(pkgDir, "SparkR").mkString(File.separator)).exists
+    }.isDefined
+  }
+
+  /**
    * Get the list of paths for R packages in various deployment modes, of which the first
    * path is for the SparkR package itself. The second path is for R packages built as
    * part of Spark Packages, if any exist. Spark Packages can be provided through the
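An aside on the Option idiom in isSparkRInstalled: filter(p).isDefined on an Option is equivalent to the more direct exists(p); the two always agree. A self-contained illustration (not part of the commit):

    val pkgPath: Option[String] = Some("/opt/spark/R/lib")
    val hasSparkR = (dir: String) => dir.nonEmpty  // stand-in predicate

    // filter-then-isDefined and exists are interchangeable here.
    assert(pkgPath.filter(hasSparkR).isDefined == pkgPath.exists(hasSparkR))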
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 0b020592b0..b2bc886108 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -32,6 +32,7 @@ import org.apache.spark.api.r.RUtils
 import org.apache.spark.deploy.SparkSubmit._
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
 import org.apache.spark.internal.Logging
+import org.apache.spark.TestUtils.JavaSourceFromString
 import org.apache.spark.util.{ResetSystemProperties, Utils}
 
 // Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
@@ -417,6 +418,8 @@ class SparkSubmitSuite
   // See https://gist.github.com/shivaram/3a2fecce60768a603dac for an error log
   ignore("correctly builds R packages included in a jar with --packages") {
     assume(RUtils.isRInstalled, "R isn't installed on this machine.")
+    // Check if the SparkR package is installed
+    assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
     val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
     val rScriptDir =
@@ -435,6 +438,41 @@
     }
   }
 
+  test("include an external JAR in SparkR") {
+    assume(RUtils.isRInstalled, "R isn't installed on this machine.")
+    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    // Check if the SparkR package is installed
+    assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
+    val rScriptDir =
+      Seq(sparkHome, "R", "pkg", "inst", "tests", "testthat", "jarTest.R").mkString(File.separator)
+    assert(new File(rScriptDir).exists)
+
+    // compile a small jar containing a class that will be called from R code.
+    val tempDir = Utils.createTempDir()
+    val srcDir = new File(tempDir, "sparkrtest")
+    srcDir.mkdirs()
+    val excSource = new JavaSourceFromString(new File(srcDir, "DummyClass").getAbsolutePath,
+      """package sparkrtest;
+        |
+        |public class DummyClass implements java.io.Serializable {
+        |  public static String helloWorld(String arg) { return "Hello " + arg; }
+        |  public static int addStuff(int arg1, int arg2) { return arg1 + arg2; }
+        |}
+      """.stripMargin)
+    val excFile = TestUtils.createCompiledClass("DummyClass", srcDir, excSource, Seq.empty)
+    val jarFile = new File(tempDir, "sparkRTestJar-%s.jar".format(System.currentTimeMillis()))
+    val jarURL = TestUtils.createJar(Seq(excFile), jarFile, directoryPrefix = Some("sparkrtest"))
+
+    val args = Seq(
+      "--name", "testApp",
+      "--master", "local",
+      "--jars", jarURL.toString,
+      "--verbose",
+      "--conf", "spark.ui.enabled=false",
+      rScriptDir)
+    runSparkSubmit(args)
+  }
+
   test("resolves command line argument paths correctly") {
     val jars = "/jar1,/jar2" // --jars
     val files = "hdfs:/file1,file2" // --files