author     Reynold Xin <rxin@databricks.com>    2017-02-20 12:21:07 -0800
committer  gatorsmile <gatorsmile@gmail.com>    2017-02-20 12:21:07 -0800
commit     0733a54a4517b82291efed9ac7f7407d9044593c (patch)
tree       c5ede60c50ad3097fa6451ef5a52b2cbbc4caed1 /sql/core/src/main/scala/org
parent     ead4ba0eb5841e42e6a57c1a1865bf89564e8ff9 (diff)
[SPARK-19669][SQL] Open up visibility for sharedState, sessionState, and a few other functions
## What changes were proposed in this pull request?

To ease debugging, most of Spark SQL's internals have public-level visibility. Two of the most important internal states, sharedState and sessionState, however, are package private. It would make more sense to open these up as well, with clear documentation that they are internal. In addition, users currently have a way to set the active/default SparkSession, but no way to actually get them back. We should open those up as well.

## How was this patch tested?

N/A - only visibility change.

Author: Reynold Xin <rxin@databricks.com>

Closes #17002 from rxin/SPARK-19669.
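For context, a minimal sketch of how the opened-up surface might be used from user code once this lands. The app name is illustrative only, and poking at sharedState/sessionState is strictly for debugging, since both are marked @InterfaceStability.Unstable by this patch:

    import org.apache.spark.sql.SparkSession

    // Building a session also records it as the default (and thread-active) session.
    val spark = SparkSession.builder().appName("demo").getOrCreate()

    // Newly public accessors on the companion object; both return Option,
    // so absent sessions are handled without nulls.
    val active: Option[SparkSession] = SparkSession.getActiveSession
    val default: Option[SparkSession] = SparkSession.getDefaultSession

    // Newly public (but Unstable) internal state, handy when debugging:
    // sharedState is shared across sessions; sessionState is per-session.
    val warehouse: String = spark.sharedState.warehousePath
    val sqlConf = spark.sessionState.conf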
Diffstat (limited to 'sql/core/src/main/scala/org')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala          | 29
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala  |  4
2 files changed, 26 insertions(+), 7 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 1975a56caf..72af55c1fa 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -95,18 +95,28 @@ class SparkSession private(
   /**
    * State shared across sessions, including the `SparkContext`, cached data, listener,
    * and a catalog that interacts with external systems.
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
    */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sharedState: SharedState = {
+  lazy val sharedState: SharedState = {
     existingSharedState.getOrElse(new SharedState(sparkContext))
   }

   /**
    * State isolated across sessions, including SQL configurations, temporary tables, registered
    * functions, and everything else that accepts a [[org.apache.spark.sql.internal.SQLConf]].
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
    */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sessionState: SessionState = {
+  lazy val sessionState: SessionState = {
     SparkSession.reflect[SessionState, SparkSession](
       SparkSession.sessionStateClassName(sparkContext.conf),
       self)
@@ -613,7 +623,6 @@ class SparkSession private(
    *
    * @since 2.1.0
    */
-  @InterfaceStability.Stable
   def time[T](f: => T): T = {
     val start = System.nanoTime()
     val ret = f
@@ -928,9 +937,19 @@ object SparkSession {
     defaultSession.set(null)
   }

-  private[sql] def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)
+  /**
+   * Returns the active SparkSession for the current thread, returned by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)

-  private[sql] def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)
+  /**
+   * Returns the default SparkSession that is returned by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)

   /** A global SQL listener used for the SQL UI. */
   private[sql] val sqlListener = new AtomicReference[SQLListener]()
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
index 8de95fe64e..7ce9938f0d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
@@ -39,7 +39,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
   // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
   // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
-  val warehousePath = {
+  val warehousePath: String = {
     val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
     if (configFile != null) {
       sparkContext.hadoopConfiguration.addResource(configFile)
@@ -103,7 +103,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
   /**
    * A manager for global temporary views.
    */
-  val globalTempViewManager = {
+  val globalTempViewManager: GlobalTempViewManager = {
     // System preserved database should not exist in metastore. However it's hard to guarantee it
     // for every session, because case-sensitivity differs. Here we always lowercase it to make our
     // life easier.
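A side note on the two SharedState.scala hunks: they only add explicit result types to public vals, presumably because an inferred type leaks the body's most specific type into the API. A toy sketch of the difference, using a hypothetical class unrelated to Spark:

    import java.util.{ArrayList, List => JList}

    class Example {
      // Inferred: the public type is ArrayList[String]; swapping in another
      // List implementation later silently changes the API for callers.
      val inferred = new ArrayList[String]()

      // Declared: the public type stays JList[String] regardless of the body.
      val declared: JList[String] = new ArrayList[String]()
    }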