author    felixcheung <felixcheung_m@hotmail.com>    2016-01-19 18:31:03 -0800
committer Shivaram Venkataraman <shivaram@cs.berkeley.edu>    2016-01-19 18:31:03 -0800
commit    488bbb216c82306e82b8963a331d48d484e8eadd (patch)
tree      b8124d51a1806e68b8038b20481221b7fe1765ec /R
parent    beda9014220be77dd735e6af1903e7d93dceb110 (diff)
[SPARK-12232][SPARKR] New R API for read.table to avoid name conflict

shivaram sorry it took longer to fix some conflicts; this is the change to add an alias for `table`.

Author: felixcheung <felixcheung_m@hotmail.com>

Closes #10406 from felixcheung/readtable.
Diffstat (limited to 'R')
-rw-r--r--  R/pkg/NAMESPACE                             2
-rw-r--r--  R/pkg/R/SQLContext.R                        7
-rw-r--r--  R/pkg/inst/tests/testthat/test_context.R   15
-rw-r--r--  R/pkg/inst/tests/testthat/test_sparkSQL.R  13
4 files changed, 17 insertions, 20 deletions
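
For orientation before the patch itself, a minimal SparkR session sketch (not part of this commit) showing the renamed API. The JSON path and table name below are hypothetical; sparkR.init() and sparkRSQL.init() are the SparkR 1.x entry points, as in the patch's own roxygen example.

library(SparkR)
sc <- sparkR.init()
sqlContext <- sparkRSQL.init(sc)

# Register a temporary table backed by a JSON file (hypothetical path).
df <- read.json(sqlContext, "path/to/people.json")
registerTempTable(df, "people")

# Renamed API: previously table(sqlContext, "people"), which completely
# masked base::table once SparkR was attached.
people_df <- tableToDF(sqlContext, "people")

# base::table is callable again without the base:: prefix:
counts <- table(c("a", "b", "a"))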
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 7739e9ea86..00634c1a70 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -280,7 +280,7 @@ export("as.DataFrame",
        "read.text",
        "sql",
        "str",
-       "table",
+       "tableToDF",
        "tableNames",
        "tables",
        "uncacheTable")
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 99679b4a77..16a2578678 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -352,6 +352,8 @@ sql <- function(sqlContext, sqlQuery) {
 #' @param sqlContext SQLContext to use
 #' @param tableName The SparkSQL Table to convert to a DataFrame.
 #' @return DataFrame
+#' @rdname tableToDF
+#' @name tableToDF
 #' @export
 #' @examples
 #'\dontrun{
@@ -360,15 +362,14 @@ sql <- function(sqlContext, sqlQuery) {
 #' path <- "path/to/file.json"
 #' df <- read.json(sqlContext, path)
 #' registerTempTable(df, "table")
-#' new_df <- table(sqlContext, "table")
+#' new_df <- tableToDF(sqlContext, "table")
 #' }
 
-table <- function(sqlContext, tableName) {
+tableToDF <- function(sqlContext, tableName) {
   sdf <- callJMethod(sqlContext, "table", tableName)
   dataFrame(sdf)
 }
 
-
 #' Tables
 #'
 #' Returns a DataFrame containing names of tables in the given database.
diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R
index 92dbd575c2..3b14a497b4 100644
--- a/R/pkg/inst/tests/testthat/test_context.R
+++ b/R/pkg/inst/tests/testthat/test_context.R
@@ -24,11 +24,11 @@ test_that("Check masked functions", {
   func <- lapply(masked, function(x) { capture.output(showMethods(x))[[1]] })
   funcSparkROrEmpty <- grepl("\\(package SparkR\\)$|^$", func)
   maskedBySparkR <- masked[funcSparkROrEmpty]
-  expect_equal(length(maskedBySparkR), 18)
-  expect_equal(sort(maskedBySparkR), sort(c("describe", "cov", "filter", "lag", "na.omit",
-                                            "predict", "sd", "var", "colnames", "colnames<-",
-                                            "intersect", "rank", "rbind", "sample", "subset",
-                                            "summary", "table", "transform")))
+  namesOfMasked <- c("describe", "cov", "filter", "lag", "na.omit", "predict", "sd", "var",
+                     "colnames", "colnames<-", "intersect", "rank", "rbind", "sample", "subset",
+                     "summary", "transform")
+  expect_equal(length(maskedBySparkR), length(namesOfMasked))
+  expect_equal(sort(maskedBySparkR), sort(namesOfMasked))
   # above are those reported as masked when `library(SparkR)`
   # note that many of these methods are still callable without base:: or stats:: prefix
   # there should be a test for each of these, except followings, which are currently "broken"
@@ -36,8 +36,9 @@ test_that("Check masked functions", {
     any(grepl("=\"ANY\"", capture.output(showMethods(x)[-1])))
   }))
   maskedCompletely <- masked[!funcHasAny]
-  expect_equal(length(maskedCompletely), 4)
-  expect_equal(sort(maskedCompletely), sort(c("cov", "filter", "sample", "table")))
+  namesOfMaskedCompletely <- c("cov", "filter", "sample")
+  expect_equal(length(maskedCompletely), length(namesOfMaskedCompletely))
+  expect_equal(sort(maskedCompletely), sort(namesOfMaskedCompletely))
 })
 
 test_that("repeatedly starting and stopping SparkR", {
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 6610734cf4..14d40d5066 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -335,7 +335,6 @@ writeLines(mockLinesMapType, mapTypeJsonPath)
 test_that("Collect DataFrame with complex types", {
   # ArrayType
   df <- read.json(sqlContext, complexTypeJsonPath)
-
   ldf <- collect(df)
   expect_equal(nrow(ldf), 3)
   expect_equal(ncol(ldf), 3)
@@ -490,19 +489,15 @@ test_that("insertInto() on a registered table", {
   unlink(parquetPath2)
 })
 
-test_that("table() returns a new DataFrame", {
+test_that("tableToDF() returns a new DataFrame", {
   df <- read.json(sqlContext, jsonPath)
   registerTempTable(df, "table1")
-  tabledf <- table(sqlContext, "table1")
+  tabledf <- tableToDF(sqlContext, "table1")
   expect_is(tabledf, "DataFrame")
   expect_equal(count(tabledf), 3)
+  tabledf2 <- tableToDF(sqlContext, "table1")
+  expect_equal(count(tabledf2), 3)
   dropTempTable(sqlContext, "table1")
-
-  # nolint start
-  # Test base::table is working
-  #a <- letters[1:3]
-  #expect_equal(class(table(a, sample(a))), "table")
-  # nolint end
 })
 
 test_that("toRDD() returns an RRDD", {