about summary refs log tree commit diff
path: root/R/pkg
diff options
context:
space:
mode:
author	Shivaram Venkataraman <shivaram@cs.berkeley.edu>	2015-05-29 15:08:30 -0700
committer	Davies Liu <davies@databricks.com>	2015-05-29 15:08:30 -0700
commit5fb97dca9bcfc29ac33823554c8783997e811b99 (patch)
tree1e863a6fcc2fc98321cee115b0942e250117f05e /R/pkg
parent82a396c2f594bade276606dcd0c0545a650fb838 (diff)
downloadspark-5fb97dca9bcfc29ac33823554c8783997e811b99.tar.gz
spark-5fb97dca9bcfc29ac33823554c8783997e811b99.tar.bz2
spark-5fb97dca9bcfc29ac33823554c8783997e811b99.zip
[SPARK-7954] [SPARKR] Create SparkContext in sparkRSQL init
cc davies Author: Shivaram Venkataraman <shivaram@cs.berkeley.edu> Closes #6507 from shivaram/sparkr-init and squashes the following commits: 6fdd169 [Shivaram Venkataraman] Create SparkContext in sparkRSQL init
Diffstat (limited to 'R/pkg')
-rw-r--r--	R/pkg/R/sparkR.R	| 24
1 file changed, 19 insertions(+), 5 deletions(-)
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 68387f0f53..5ced7c688f 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -225,14 +225,21 @@ sparkR.init <- function(
#' sqlContext <- sparkRSQL.init(sc)
#'}
-sparkRSQL.init <- function(jsc) {
+sparkRSQL.init <- function(jsc = NULL) {
if (exists(".sparkRSQLsc", envir = .sparkREnv)) {
return(get(".sparkRSQLsc", envir = .sparkREnv))
}
+ # If jsc is NULL, create a Spark Context
+ sc <- if (is.null(jsc)) {
+ sparkR.init()
+ } else {
+ jsc
+ }
+
sqlContext <- callJStatic("org.apache.spark.sql.api.r.SQLUtils",
- "createSQLContext",
- jsc)
+ "createSQLContext",
+ sc)
assign(".sparkRSQLsc", sqlContext, envir = .sparkREnv)
sqlContext
}
@@ -249,12 +256,19 @@ sparkRSQL.init <- function(jsc) {
#' sqlContext <- sparkRHive.init(sc)
#'}
-sparkRHive.init <- function(jsc) {
+sparkRHive.init <- function(jsc = NULL) {
if (exists(".sparkRHivesc", envir = .sparkREnv)) {
return(get(".sparkRHivesc", envir = .sparkREnv))
}
- ssc <- callJMethod(jsc, "sc")
+ # If jsc is NULL, create a Spark Context
+ sc <- if (is.null(jsc)) {
+ sparkR.init()
+ } else {
+ jsc
+ }
+
+ ssc <- callJMethod(sc, "sc")
hiveCtx <- tryCatch({
newJObject("org.apache.spark.sql.hive.HiveContext", ssc)
}, error = function(err) {