aboutsummaryrefslogtreecommitdiff
path: root/R/pkg/inst/tests/testthat/test_context.R
diff options
context:
space:
mode:
authorFelix Cheung <felixcheung_m@hotmail.com>2016-07-17 19:02:21 -0700
committerShivaram Venkataraman <shivaram@cs.berkeley.edu>2016-07-17 19:02:21 -0700
commitd27fe9ba6763aae6a5e48f16d7cbd85658df7cf7 (patch)
tree26d85a87f9f04f8ccf3ff48c5e612bd0c202d44e /R/pkg/inst/tests/testthat/test_context.R
parent7b84758034b9bceca1168438ef5d0beefd5b5273 (diff)
downloadspark-d27fe9ba6763aae6a5e48f16d7cbd85658df7cf7.tar.gz
spark-d27fe9ba6763aae6a5e48f16d7cbd85658df7cf7.tar.bz2
spark-d27fe9ba6763aae6a5e48f16d7cbd85658df7cf7.zip
[SPARK-16027][SPARKR] Fix R tests SparkSession init/stop
## What changes were proposed in this pull request? Fix R SparkSession init/stop, and warnings of reusing existing Spark Context. ## How was this patch tested? Unit tests. shivaram Author: Felix Cheung <felixcheung_m@hotmail.com> Closes #14177 from felixcheung/rsessiontest.
Diffstat (limited to 'R/pkg/inst/tests/testthat/test_context.R')
-rw-r--r--R/pkg/inst/tests/testthat/test_context.R23
1 file changed, 10 insertions, 13 deletions
diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R
index 2a1bd61b11..8bd134a58d 100644
--- a/R/pkg/inst/tests/testthat/test_context.R
+++ b/R/pkg/inst/tests/testthat/test_context.R
@@ -63,18 +63,14 @@ test_that("repeatedly starting and stopping SparkR", {
}
})
-# Does not work consistently even with Hive off
-# nolint start
-# test_that("repeatedly starting and stopping SparkR", {
-# for (i in 1:4) {
-# sparkR.session(enableHiveSupport = FALSE)
-# df <- createDataFrame(data.frame(dummy=1:i))
-# expect_equal(count(df), i)
-# sparkR.session.stop()
-# Sys.sleep(5) # Need more time to shutdown Hive metastore
-# }
-# })
-# nolint end
+test_that("repeatedly starting and stopping SparkSession", {
+ for (i in 1:4) {
+ sparkR.session(enableHiveSupport = FALSE)
+ df <- createDataFrame(data.frame(dummy = 1:i))
+ expect_equal(count(df), i)
+ sparkR.session.stop()
+ }
+})
test_that("rdd GC across sparkR.stop", {
sc <- sparkR.sparkContext() # sc should get id 0
@@ -96,6 +92,7 @@ test_that("rdd GC across sparkR.stop", {
count(rdd3)
count(rdd4)
+ sparkR.session.stop()
})
test_that("job group functions can be called", {
@@ -164,7 +161,7 @@ test_that("sparkJars sparkPackages as comma-separated strings", {
})
test_that("spark.lapply should perform simple transforms", {
- sc <- sparkR.sparkContext()
+ sparkR.sparkContext()
doubled <- spark.lapply(1:10, function(x) { 2 * x })
expect_equal(doubled, as.list(2 * 1:10))
sparkR.session.stop()