author    Felix Cheung <felixcheung_m@hotmail.com>    2016-06-26 13:10:43 -0700
committer Shivaram Venkataraman <shivaram@cs.berkeley.edu>    2016-06-26 13:10:43 -0700
commit    30b182bcc088aef161585211c517f473b9ee6632 (patch)
tree      6681d46cb578fb214902a46109c0124565cac281 /sql
parent    e87741589a24821b5fe73e5d9ee2164247998580 (diff)
[SPARK-16184][SPARKR] conf API for SparkSession
## What changes were proposed in this pull request?

Add a `conf` method to get the Runtime Config from SparkSession.

## How was this patch tested?

Unit tests and manual tests. This is how it works in the sparkR shell:

```
SparkSession available as 'spark'.
> conf()
$hive.metastore.warehouse.dir
[1] "file:/opt/spark-2.0.0-bin-hadoop2.6/R/spark-warehouse"

$spark.app.id
[1] "local-1466749575523"

$spark.app.name
[1] "SparkR"

$spark.driver.host
[1] "10.0.2.1"

$spark.driver.port
[1] "45629"

$spark.executorEnv.LD_LIBRARY_PATH
[1] "$LD_LIBRARY_PATH:/usr/lib/R/lib:/usr/lib/x86_64-linux-gnu:/usr/lib/jvm/default-java/jre/lib/amd64/server"

$spark.executor.id
[1] "driver"

$spark.home
[1] "/opt/spark-2.0.0-bin-hadoop2.6"

$spark.master
[1] "local[*]"

$spark.sql.catalogImplementation
[1] "hive"

$spark.submit.deployMode
[1] "client"

> conf("spark.master")
$spark.master
[1] "local[*]"
```

Author: Felix Cheung <felixcheung_m@hotmail.com>

Closes #13885 from felixcheung/rconf.
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 4
1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index 0a995d2e9d..7d8ea03a27 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -71,6 +71,10 @@ private[sql] object SQLUtils extends Logging {
     }
   }
 
+  def getSessionConf(spark: SparkSession): JMap[String, String] = {
+    spark.conf.getAll.asJava
+  }
+
   def getJavaSparkContext(spark: SparkSession): JavaSparkContext = {
     new JavaSparkContext(spark.sparkContext)
   }
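
For context, here is a minimal, self-contained Scala sketch of what the four added lines do. It is hypothetical and not part of the patch: since `SQLUtils` is `private[sql]`, R reaches `getSessionConf` through the SparkR JVM backend (via `callJStatic`) rather than through a direct user call, so the sketch goes through the public `RuntimeConfig` API instead.

```
import java.util.{Map => JMap}

import scala.collection.JavaConverters._

import org.apache.spark.sql.SparkSession

// Hypothetical demo object; the name is illustrative only.
object SessionConfDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SessionConfDemo")
      .getOrCreate()

    // The same conversion the new helper performs: expose every effective
    // runtime config entry as a java.util.Map.
    val conf: JMap[String, String] = spark.conf.getAll.asJava

    // Print entries sorted by key, mirroring the named list the SparkR
    // shell example above displays.
    conf.asScala.toSeq.sortBy(_._1).foreach { case (k, v) =>
      println(s"$k = $v")
    }

    spark.stop()
  }
}
```

Returning a `java.util.Map` rather than a Scala `Map` is presumably what lets the SparkR backend serialize the result straight into the R named list shown in the shell session above.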