author    Felix Cheung <felixcheung_m@hotmail.com>    2016-07-14 09:45:30 -0700
committer Shivaram Venkataraman <shivaram@cs.berkeley.edu>    2016-07-14 09:45:30 -0700
commit    12005c88fb24168d57b577cff73eddcd9d8963fc (patch)
tree      e7025ef62243280e6d149790c9f4db64762b7fc0 /R
parent    093ebbc628699b40f091b5b7083c119fffa9314b (diff)
[SPARK-16538][SPARKR] fix R call with namespace operator on SparkSession functions
## What changes were proposed in this pull request?

Fix function routing to work with and without the namespace operator `SparkR::createDataFrame`

## How was this patch tested?

manual, unit tests

shivaram

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #14195 from felixcheung/rroutedefault.
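A minimal plain-R sketch (not part of the patch, no Spark required) of the behavior the fix handles: the head of a qualified call `pkg::fun(...)` is itself a call to the `::` function, so `as.character()` on it yields three strings rather than the bare function name, and taking the last element recovers the name in both cases.

```r
# Unqualified call head: a symbol, coerced to a single string.
as.character(quote(createDataFrame))
# [1] "createDataFrame"

# Qualified call head: a call to `::`, coerced element by element.
as.character(quote(SparkR::createDataFrame))
# [1] "::"              "SparkR"          "createDataFrame"

# Keeping only the last element strips the optional `pkg::` prefix,
# which is what the dispatchFunc change below relies on.
callsite <- as.character(quote(SparkR::createDataFrame))
callsite[[length(callsite)]]
# [1] "createDataFrame"
```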
Diffstat (limited to 'R')
-rw-r--r--  R/pkg/R/SQLContext.R                        4
-rw-r--r--  R/pkg/inst/tests/testthat/test_sparkSQL.R   3
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index bc0daa25c9..d2ea1553c6 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -48,7 +48,9 @@ getInternalType <- function(x) {
#' @return whatever the target returns
#' @noRd
dispatchFunc <- function(newFuncSig, x, ...) {
- funcName <- as.character(sys.call(sys.parent())[[1]])
+ # When called with SparkR::createDataFrame, sys.call()[[1]] returns c(::, SparkR, createDataFrame)
+ callsite <- as.character(sys.call(sys.parent())[[1]])
+ funcName <- callsite[[length(callsite)]]
f <- get(paste0(funcName, ".default"))
# Strip sqlContext from list of parameters and then pass the rest along.
contextNames <- c("org.apache.spark.sql.SQLContext",
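To see how the changed lines route a call, here is a toy, self-contained sketch. The helper name `dispatch_to_default` and the simplified `createDataFrame`/`createDataFrame.default` bodies are hypothetical stand-ins, not the SparkR implementation; the sketch only mirrors the routing pattern of inspecting the caller's call site and resolving the `.default` function.

```r
# Hypothetical helper mirroring the dispatch pattern: look at the call that
# invoked the public wrapper, strip an optional `pkg::` prefix from its head,
# and forward the arguments to "<funcName>.default".
dispatch_to_default <- function(...) {
  # sys.call(sys.parent()) is the call that invoked the wrapper; its head is
  # either a bare symbol or a `::` call, so keep only the last element.
  callsite <- as.character(sys.call(sys.parent())[[1]])
  funcName <- callsite[[length(callsite)]]
  f <- get(paste0(funcName, ".default"))
  f(...)
}

# Simplified stand-in for the real implementation.
createDataFrame.default <- function(data) {
  paste("data.frame with", nrow(data), "rows")
}

# Public wrapper; when it lives in a package and is called as
# pkg::createDataFrame(...), the call head is a `::` call and the
# last-element step above still recovers "createDataFrame".
createDataFrame <- function(...) {
  dispatch_to_default(...)
}

createDataFrame(data.frame(a = 1:3, b = c("a", "b", "c")))
# [1] "data.frame with 3 rows"
```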
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 87868230eb..a1b1f1c567 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -2405,7 +2405,8 @@ test_that("createDataFrame sqlContext parameter backward compatibility", {
a <- 1:3
b <- c("a", "b", "c")
ldf <- data.frame(a, b)
- df <- suppressWarnings(createDataFrame(sqlContext, ldf))
+ # Call function with namespace :: operator - SPARK-16538
+ df <- suppressWarnings(SparkR::createDataFrame(sqlContext, ldf))
expect_equal(columns(df), c("a", "b"))
expect_equal(dtypes(df), list(c("a", "int"), c("b", "string")))
expect_equal(count(df), 3)