about summary refs log tree commit diff
path: root/R/pkg/inst/tests/testthat/test_sparkSQL.R
diff options
context:
space:
mode:
author	Felix Cheung <felixcheung_m@hotmail.com>	2016-12-09 19:06:05 -0800
committer	Shivaram Venkataraman <shivaram@cs.berkeley.edu>	2016-12-09 19:06:05 -0800
commit3e11d5bfef2f05bd6d42c4d6188eae6d63c963ef (patch)
tree65805afddad591d2cbb837a7308d988c9694392a /R/pkg/inst/tests/testthat/test_sparkSQL.R
parentd2493a203e852adf63dde4e1fc993e8d11efec3d (diff)
downloadspark-3e11d5bfef2f05bd6d42c4d6188eae6d63c963ef.tar.gz
spark-3e11d5bfef2f05bd6d42c4d6188eae6d63c963ef.tar.bz2
spark-3e11d5bfef2f05bd6d42c4d6188eae6d63c963ef.zip
[SPARK-18807][SPARKR] Should suppress output print for calls to JVM methods with void return values
## What changes were proposed in this pull request? Several SparkR API calls into JVM methods that have void return values are getting printed out, especially when running in a REPL or IDE. example: ``` > setLogLevel("WARN") NULL ``` We should fix this to make the result more clear. Also found a small change to the return value of dropTempView in 2.1 - adding doc and test for it. ## How was this patch tested? manually - I didn't find an expect_*() method in testthat for this Author: Felix Cheung <felixcheung_m@hotmail.com> Closes #16237 from felixcheung/rinvis.
Diffstat (limited to 'R/pkg/inst/tests/testthat/test_sparkSQL.R')
-rw-r--r--	R/pkg/inst/tests/testthat/test_sparkSQL.R	| 14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index c669c2e2e2..e8ccff8122 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -576,7 +576,7 @@ test_that("test tableNames and tables", {
tables <- tables()
expect_equal(count(tables), 2)
suppressWarnings(dropTempTable("table1"))
- dropTempView("table2")
+ expect_true(dropTempView("table2"))
tables <- tables()
expect_equal(count(tables), 0)
@@ -589,7 +589,7 @@ test_that(
newdf <- sql("SELECT * FROM table1 where name = 'Michael'")
expect_is(newdf, "SparkDataFrame")
expect_equal(count(newdf), 1)
- dropTempView("table1")
+ expect_true(dropTempView("table1"))
createOrReplaceTempView(df, "dfView")
sqlCast <- collect(sql("select cast('2' as decimal) as x from dfView limit 1"))
@@ -600,7 +600,7 @@ test_that(
expect_equal(ncol(sqlCast), 1)
expect_equal(out[1], " x")
expect_equal(out[2], "1 2")
- dropTempView("dfView")
+ expect_true(dropTempView("dfView"))
})
test_that("test cache, uncache and clearCache", {
@@ -609,7 +609,7 @@ test_that("test cache, uncache and clearCache", {
cacheTable("table1")
uncacheTable("table1")
clearCache()
- dropTempView("table1")
+ expect_true(dropTempView("table1"))
})
test_that("insertInto() on a registered table", {
@@ -630,13 +630,13 @@ test_that("insertInto() on a registered table", {
insertInto(dfParquet2, "table1")
expect_equal(count(sql("select * from table1")), 5)
expect_equal(first(sql("select * from table1 order by age"))$name, "Michael")
- dropTempView("table1")
+ expect_true(dropTempView("table1"))
createOrReplaceTempView(dfParquet, "table1")
insertInto(dfParquet2, "table1", overwrite = TRUE)
expect_equal(count(sql("select * from table1")), 2)
expect_equal(first(sql("select * from table1 order by age"))$name, "Bob")
- dropTempView("table1")
+ expect_true(dropTempView("table1"))
unlink(jsonPath2)
unlink(parquetPath2)
@@ -650,7 +650,7 @@ test_that("tableToDF() returns a new DataFrame", {
expect_equal(count(tabledf), 3)
tabledf2 <- tableToDF("table1")
expect_equal(count(tabledf2), 3)
- dropTempView("table1")
+ expect_true(dropTempView("table1"))
})
test_that("toRDD() returns an RRDD", {