path: root/R/pkg/inst/tests/testthat/test_sparkSQL.R
author: Felix Cheung <felixcheung_m@hotmail.com> 2017-04-06 09:15:13 -0700
committer: Felix Cheung <felixcheung@apache.org> 2017-04-06 09:15:13 -0700
commit: 5a693b4138d4ce948e3bcdbe28d5c01d5deb8fa9 (patch)
tree: b71f548c8544c97b477e6a5fb69f895c03e27436 /R/pkg/inst/tests/testthat/test_sparkSQL.R
parent: bccc330193217b2ec9660e06f1db6dd58f7af5d8 (diff)
[SPARK-20195][SPARKR][SQL] add createTable catalog API and deprecate createExternalTable
## What changes were proposed in this pull request?

Following up on #17483, add createTable (which is new in 2.2.0) and deprecate createExternalTable, plus a number of minor fixes.

## How was this patch tested?

Manual, unit tests.

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #17511 from felixcheung/rceatetable.
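For context, here is a minimal SparkR sketch of the API change described above, assuming a local Spark session; the table names, schema, and JSON path are illustrative placeholders and are not taken from the patch:

```r
library(SparkR)

# Illustrative only: start a local SparkR session.
sparkR.session(master = "local[1]")

schema <- structType(structField("name", "string"),
                     structField("age", "integer"))

# New in 2.2.0: createTable() registers a table in the catalog.
# Without a path it creates a managed table; with a path it behaves
# like the old external-table API.
people <- createTable("people", source = "json", schema = schema)

# Deprecated in 2.2.0: createExternalTable() now warns and forwards to
# createTable(). The JSON path below is a made-up placeholder.
json_df <- createExternalTable("json", path = "/tmp/people.json", source = "json")

sparkR.session.stop()
```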
Diffstat (limited to 'R/pkg/inst/tests/testthat/test_sparkSQL.R')
-rw-r--r-- R/pkg/inst/tests/testthat/test_sparkSQL.R | 20
1 file changed, 15 insertions, 5 deletions
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index ad06711a79..58cf24256a 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -281,7 +281,7 @@ test_that("create DataFrame from RDD", {
setHiveContext(sc)
sql("CREATE TABLE people (name string, age double, height float)")
df <- read.df(jsonPathNa, "json", schema)
- invisible(insertInto(df, "people"))
+ insertInto(df, "people")
expect_equal(collect(sql("SELECT age from people WHERE name = 'Bob'"))$age,
c(16))
expect_equal(collect(sql("SELECT height from people WHERE name ='Bob'"))$height,
@@ -1268,7 +1268,16 @@ test_that("column calculation", {
test_that("test HiveContext", {
setHiveContext(sc)
- df <- createExternalTable("json", jsonPath, "json")
+
+ schema <- structType(structField("name", "string"), structField("age", "integer"),
+ structField("height", "float"))
+ createTable("people", source = "json", schema = schema)
+ df <- read.df(jsonPathNa, "json", schema)
+ insertInto(df, "people")
+ expect_equal(collect(sql("SELECT age from people WHERE name = 'Bob'"))$age, c(16))
+ sql("DROP TABLE people")
+
+ df <- createTable("json", jsonPath, "json")
expect_is(df, "SparkDataFrame")
expect_equal(count(df), 3)
df2 <- sql("select * from json")
@@ -1276,25 +1285,26 @@ test_that("test HiveContext", {
expect_equal(count(df2), 3)
jsonPath2 <- tempfile(pattern = "sparkr-test", fileext = ".tmp")
- invisible(saveAsTable(df, "json2", "json", "append", path = jsonPath2))
+ saveAsTable(df, "json2", "json", "append", path = jsonPath2)
df3 <- sql("select * from json2")
expect_is(df3, "SparkDataFrame")
expect_equal(count(df3), 3)
unlink(jsonPath2)
hivetestDataPath <- tempfile(pattern = "sparkr-test", fileext = ".tmp")
- invisible(saveAsTable(df, "hivetestbl", path = hivetestDataPath))
+ saveAsTable(df, "hivetestbl", path = hivetestDataPath)
df4 <- sql("select * from hivetestbl")
expect_is(df4, "SparkDataFrame")
expect_equal(count(df4), 3)
unlink(hivetestDataPath)
parquetDataPath <- tempfile(pattern = "sparkr-test", fileext = ".tmp")
- invisible(saveAsTable(df, "parquetest", "parquet", mode = "overwrite", path = parquetDataPath))
+ saveAsTable(df, "parquetest", "parquet", mode = "overwrite", path = parquetDataPath)
df5 <- sql("select * from parquetest")
expect_is(df5, "SparkDataFrame")
expect_equal(count(df5), 3)
unlink(parquetDataPath)
+
unsetHiveContext()
})