aboutsummaryrefslogtreecommitdiff
path: root/R
diff options
context:
space:
mode:
authorFelix Cheung <felixcheung_m@hotmail.com>2016-09-02 10:12:10 -0700
committerShivaram Venkataraman <shivaram@cs.berkeley.edu>2016-09-02 10:12:10 -0700
commit812333e4336113e44d2c9473bcba1cee4a989d2c (patch)
tree9b111f43edad88b891462127553c9ba2208464c0 /R
parentea662286561aa9fe321cb0a0e10cdeaf60440b90 (diff)
downloadspark-812333e4336113e44d2c9473bcba1cee4a989d2c.tar.gz
spark-812333e4336113e44d2c9473bcba1cee4a989d2c.tar.bz2
spark-812333e4336113e44d2c9473bcba1cee4a989d2c.zip
[SPARK-17376][SPARKR] Spark version should be available in R
## What changes were proposed in this pull request?

Add sparkR.version() API.

```
> sparkR.version()
[1] "2.1.0-SNAPSHOT"
```

## How was this patch tested?

manual, unit tests

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #14935 from felixcheung/rsparksessionversion.
Diffstat (limited to 'R')
-rw-r--r--R/pkg/NAMESPACE13
-rw-r--r--R/pkg/R/SQLContext.R19
-rw-r--r--R/pkg/inst/tests/testthat/test_sparkSQL.R6
3 files changed, 32 insertions, 6 deletions
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 5e625b2d8d..ce41b512a4 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -15,8 +15,15 @@ export("sparkR.init")
export("sparkR.stop")
export("sparkR.session.stop")
export("sparkR.conf")
+export("sparkR.version")
export("print.jobj")
+export("sparkR.newJObject")
+export("sparkR.callJMethod")
+export("sparkR.callJStatic")
+
+export("install.spark")
+
export("sparkRSQL.init",
"sparkRHive.init")
@@ -363,9 +370,3 @@ S3method(structField, character)
S3method(structField, jobj)
S3method(structType, jobj)
S3method(structType, structField)
-
-export("sparkR.newJObject")
-export("sparkR.callJMethod")
-export("sparkR.callJStatic")
-
-export("install.spark")
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 572e71e25b..a1404543be 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -156,6 +156,25 @@ sparkR.conf <- function(key, defaultValue) {
}
}
+#' Get version of Spark on which this application is running
+#'
+#' Get version of Spark on which this application is running.
+#'
+#' @return a character string of the Spark version
+#' @rdname sparkR.version
+#' @name sparkR.version
+#' @export
+#' @examples
+#'\dontrun{
+#' sparkR.session()
+#' version <- sparkR.version()
+#' }
+#' @note sparkR.version since 2.1.0
+sparkR.version <- function() {
+  sparkSession <- getSparkSession()
+  callJMethod(sparkSession, "version")
+}
+
getDefaultSqlSource <- function() {
l <- sparkR.conf("spark.sql.sources.default", "org.apache.spark.sql.parquet")
l[["spark.sql.sources.default"]]
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 683a15cb4f..aac3f62204 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -2507,6 +2507,12 @@ test_that("enableHiveSupport on SparkSession", {
expect_equal(value, "hive")
})
+test_that("Spark version from SparkSession", {
+  ver <- callJMethod(sc, "version")
+  version <- sparkR.version()
+  expect_equal(ver, version)
+})
+
unlink(parquetPath)
unlink(orcPath)
unlink(jsonPath)