aboutsummaryrefslogtreecommitdiff
path: root/R/pkg/inst
diff options
context:
space:
mode:
authorwm624@hotmail.com <wm624@hotmail.com>2016-05-25 21:08:03 -0700
committerShivaram Venkataraman <shivaram@cs.berkeley.edu>2016-05-25 21:08:03 -0700
commit06bae8af17d9478c889d206a4556a697b5d629e7 (patch)
tree25c20cec0de0d1d39fdef54abf873d59cf79ed82 /R/pkg/inst
parent06ed1fa3e45adfc11b0f615cb8b97c99fadc735f (diff)
downloadspark-06bae8af17d9478c889d206a4556a697b5d629e7.tar.gz
spark-06bae8af17d9478c889d206a4556a697b5d629e7.tar.bz2
spark-06bae8af17d9478c889d206a4556a697b5d629e7.zip
[SPARK-15439][SPARKR] Failed to run unit test in SparkR
## What changes were proposed in this pull request? (Please fill in changes proposed in this fix) There are some failures when running SparkR unit tests. In this PR, I fixed two of these failures, in test_context.R and test_sparkSQL.R. The first one is due to a different masked name; I added the missing names to the expected arrays. The second one is because one PR removed the logic of a previous fix for the missing subset method. The file privilege issue is still there; I am debugging it. The SparkR shell can run the test case successfully. test_that("pipeRDD() on RDDs", { actual <- collect(pipeRDD(rdd, "more")) When using the run-test script, it complains that no such directory exists, as below: cannot open file '/tmp/Rtmp4FQbah/filee2273f9d47f7': No such file or directory ## How was this patch tested? (Please explain how this patch was tested. E.g. unit tests, integration tests, manual tests) Manually tested it Author: wm624@hotmail.com <wm624@hotmail.com> Closes #13284 from wangmiao1981/R.
Diffstat (limited to 'R/pkg/inst')
-rw-r--r--R/pkg/inst/tests/testthat/test_context.R6
1 file changed, 5 insertions, 1 deletion
diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R
index 0e5e15c0a9..95258babbf 100644
--- a/R/pkg/inst/tests/testthat/test_context.R
+++ b/R/pkg/inst/tests/testthat/test_context.R
@@ -27,6 +27,11 @@ test_that("Check masked functions", {
namesOfMasked <- c("describe", "cov", "filter", "lag", "na.omit", "predict", "sd", "var",
"colnames", "colnames<-", "intersect", "rank", "rbind", "sample", "subset",
"summary", "transform", "drop", "window", "as.data.frame")
+ namesOfMaskedCompletely <- c("cov", "filter", "sample")
+ if (as.numeric(R.version$major) == 3 && as.numeric(R.version$minor) > 2) {
+ namesOfMasked <- c("endsWith", "startsWith", namesOfMasked)
+ namesOfMaskedCompletely <- c("endsWith", "startsWith", namesOfMaskedCompletely)
+ }
expect_equal(length(maskedBySparkR), length(namesOfMasked))
expect_equal(sort(maskedBySparkR), sort(namesOfMasked))
# above are those reported as masked when `library(SparkR)`
@@ -36,7 +41,6 @@ test_that("Check masked functions", {
any(grepl("=\"ANY\"", capture.output(showMethods(x)[-1])))
}))
maskedCompletely <- masked[!funcHasAny]
- namesOfMaskedCompletely <- c("cov", "filter", "sample")
expect_equal(length(maskedCompletely), length(namesOfMaskedCompletely))
expect_equal(sort(maskedCompletely), sort(namesOfMaskedCompletely))
})