author     Andrew Or <andrew@databricks.com>   2016-04-29 16:41:13 -0700
committer  Andrew Or <andrew@databricks.com>   2016-04-29 16:41:13 -0700
commit     d33e3d572ed7143f151f9c96fd08407f8de340f4 (patch)
tree       6edbedd9e76ad883c9e9992f95cf3590cb4c1955 /sql
parent     4ae9fe091c2cb8388c581093d62d3deaef40993e (diff)
[SPARK-14988][PYTHON] SparkSession API follow-ups
## What changes were proposed in this pull request?

Addresses comments in #12765.

## How was this patch tested?

Python tests.

Author: Andrew Or <andrew@databricks.com>

Closes #12784 from andrewor14/python-followup.
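The user-facing surface touched by this follow-up is `RuntimeConfig`, exposed as `SparkSession.conf`. A minimal sketch of the behavior the new methods enable, assuming a local session named `spark` (the `spark.sql.nonexistent.key` below is illustrative, not a real config key):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local").appName("conf-demo").getOrCreate()

spark.conf.set("spark.sql.shuffle.partitions", "10")
spark.conf.get("spark.sql.shuffle.partitions")          // "10"
spark.conf.get("spark.sql.nonexistent.key", "fallback") // new overload: returns the supplied default
spark.conf.getOption("spark.sql.nonexistent.key")       // None instead of NoSuchElementException
```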
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala                  |  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala            | 17
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala               |  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala             |  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala  |  2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala         |  7
6 files changed, 28 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 1439d14980..08be94e8d4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2308,7 +2308,7 @@ class Dataset[T] private[sql](
* @since 1.6.0
*/
def registerTempTable(tableName: String): Unit = {
- sparkSession.registerDataFrameAsTable(toDF(), tableName)
+ sparkSession.registerTable(toDF(), tableName)
}
/**
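The rename above is internal only; the public `Dataset.registerTempTable` keeps its signature. A hedged usage sketch, reusing the `spark` session from the first example:

```scala
// Public API unchanged; only the private[sql] helper it delegates to was renamed.
val df = spark.range(5).toDF("id")
df.registerTempTable("ids") // now routes through sparkSession.registerTable(...)
spark.sql("SELECT * FROM ids WHERE id > 2").show()
```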
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index bf97d728b8..f2e851520e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -72,6 +72,15 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
*
* @since 2.0.0
*/
+ def get(key: String, default: String): String = {
+ sqlConf.getConfString(key, default)
+ }
+
+ /**
+ * Returns the value of the Spark runtime configuration property for the given key, if set.
+ *
+ * @since 2.0.0
+ */
def getOption(key: String): Option[String] = {
try Option(get(key)) catch {
case _: NoSuchElementException => None
@@ -86,4 +95,12 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
def unset(key: String): Unit = {
sqlConf.unsetConf(key)
}
+
+ /**
+ * Returns whether a particular key is set.
+ */
+ protected[sql] def contains(key: String): Boolean = {
+ sqlConf.contains(key)
+ }
+
}
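Note the shape of `getOption` above: `get(key)` throws `NoSuchElementException` for an absent key, and `Option(...)` additionally guards against a `null` value. The same pattern as a standalone sketch (the helper name is illustrative, not Spark API):

```scala
// Lift a throwing, possibly null-returning lookup into an Option.
def asOption(lookup: String => String)(key: String): Option[String] =
  try Option(lookup(key)) catch { case _: NoSuchElementException => None }

val m = Map("a" -> "1")
asOption(m)("a") // Some("1")
asOption(m)("b") // None: Map.apply throws NoSuchElementException for a missing key
```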
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 1f08a61aea..6dfac3d7ae 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -600,7 +600,7 @@ class SQLContext private[sql](
* only during the lifetime of this instance of SQLContext.
*/
private[sql] def registerDataFrameAsTable(df: DataFrame, tableName: String): Unit = {
- sparkSession.registerDataFrameAsTable(df, tableName)
+ sparkSession.registerTable(df, tableName)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 2814b70c9c..11c0aaab23 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -577,7 +577,7 @@ class SparkSession private(
* Registers the given [[DataFrame]] as a temporary table in the catalog.
* Temporary tables exist only during the lifetime of this instance of [[SparkSession]].
*/
- protected[sql] def registerDataFrameAsTable(df: DataFrame, tableName: String): Unit = {
+ protected[sql] def registerTable(df: DataFrame, tableName: String): Unit = {
sessionState.catalog.createTempTable(
sessionState.sqlParser.parseTableIdentifier(tableName).table,
df.logicalPlan,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
index ec3fadab50..f05401b02b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
@@ -30,7 +30,7 @@ case class CacheTableCommand(
override def run(sparkSession: SparkSession): Seq[Row] = {
plan.foreach { logicalPlan =>
- sparkSession.registerDataFrameAsTable(Dataset.ofRows(sparkSession, logicalPlan), tableName)
+ sparkSession.registerTable(Dataset.ofRows(sparkSession, logicalPlan), tableName)
}
sparkSession.catalog.cacheTable(tableName)
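`CacheTableCommand` backs the `CACHE TABLE` statement: when a plan is given, it first registers the query result as a temp table (via the renamed helper), then caches it. A hedged SQL-level sketch (table names are illustrative):

```scala
// CACHE TABLE ... AS SELECT registers the result as a temp table, then caches it.
spark.sql("CACHE TABLE small_ids AS SELECT id FROM ids WHERE id < 3")
spark.sql("SELECT count(*) FROM small_ids").show()
```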
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 2bfc895678..7de7748211 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -755,6 +755,13 @@ private[sql] class SQLConf extends Serializable with CatalystConf with Logging {
}.toSeq
}
+ /**
+ * Returns whether a given key is set in this [[SQLConf]].
+ */
+ def contains(key: String): Boolean = {
+ settings.containsKey(key)
+ }
+
private def setConfWithCheck(key: String, value: String): Unit = {
if (key.startsWith("spark.") && !key.startsWith("spark.sql.")) {
logWarning(s"Attempt to set non-Spark SQL config in SQLConf: key = $key, value = $value")