diff options
author | Hossein <hossein@databricks.com> | 2015-12-29 11:44:20 -0800 |
---|---|---|
committer | Yin Huai <yhuai@databricks.com> | 2015-12-29 11:44:20 -0800 |
commit | f6ecf143335d734b8f22c59649c6bbd4d5401745 (patch) | |
tree | b953bb1a8d0d95f7fdb0d4ecf3cef456c2af484d | |
parent | 8e629b10cb5167926356e2f23d3c35610aa87ffe (diff) | |
download | spark-f6ecf143335d734b8f22c59649c6bbd4d5401745.tar.gz spark-f6ecf143335d734b8f22c59649c6bbd4d5401745.tar.bz2 spark-f6ecf143335d734b8f22c59649c6bbd4d5401745.zip |
[SPARK-11199][SPARKR] Improve R context management story and add getOrCreate
* Changes api.r.SQLUtils to use ```SQLContext.getOrCreate``` instead of creating a new context.
* Adds a simple test
[SPARK-11199] #comment link with JIRA
Author: Hossein <hossein@databricks.com>
Closes #9185 from falaki/SPARK-11199.
-rw-r--r-- | R/pkg/inst/tests/testthat/test_sparkSQL.R | 4 | ||||
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 2 |
2 files changed, 5 insertions, 1 deletion
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R index c2b6adbe3a..7b508b860e 100644 --- a/R/pkg/inst/tests/testthat/test_sparkSQL.R +++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R @@ -62,6 +62,10 @@ mockLinesComplexType <- complexTypeJsonPath <- tempfile(pattern="sparkr-test", fileext=".tmp") writeLines(mockLinesComplexType, complexTypeJsonPath) +test_that("calling sparkRSQL.init returns existing SQL context", { + expect_equal(sparkRSQL.init(sc), sqlContext) +}) + test_that("infer types and check types", { expect_equal(infer_type(1L), "integer") expect_equal(infer_type(1.0), "double") diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala index b3f134614c..67da7b808b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala @@ -32,7 +32,7 @@ private[r] object SQLUtils { SerDe.registerSqlSerDe((readSqlObject, writeSqlObject)) def createSQLContext(jsc: JavaSparkContext): SQLContext = { - new SQLContext(jsc) + SQLContext.getOrCreate(jsc.sc) } def getJavaSparkContext(sqlCtx: SQLContext): JavaSparkContext = { |