Diffstat (limited to 'sql/core/src/main/scala/org')
 -rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala         | 29 ++++++++++++++++++++++++-----
 -rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala |  4 ++--
 2 files changed, 26 insertions(+), 7 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 1975a56caf..72af55c1fa 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -95,18 +95,28 @@ class SparkSession private(
   /**
    * State shared across sessions, including the `SparkContext`, cached data, listener,
    * and a catalog that interacts with external systems.
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
    */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sharedState: SharedState = {
+  lazy val sharedState: SharedState = {
     existingSharedState.getOrElse(new SharedState(sparkContext))
   }
 
   /**
    * State isolated across sessions, including SQL configurations, temporary tables, registered
    * functions, and everything else that accepts a [[org.apache.spark.sql.internal.SQLConf]].
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
    */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sessionState: SessionState = {
+  lazy val sessionState: SessionState = {
     SparkSession.reflect[SessionState, SparkSession](
       SparkSession.sessionStateClassName(sparkContext.conf),
       self)
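
With this hunk, sharedState and sessionState become publicly reachable, though both are marked @InterfaceStability.Unstable. A minimal sketch of what a caller can now observe; the object name, app name, and local master are illustrative assumptions, not part of the change:

import org.apache.spark.sql.SparkSession

object StateDemo {
  def main(args: Array[String]): Unit = {
    // Assumed local session, purely for illustration.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("state-demo")
      .getOrCreate()

    // sharedState is created once per SparkContext and reused by all sessions.
    println(s"warehouse: ${spark.sharedState.warehousePath}")

    // sessionState is per-session: conf changes here do not leak into
    // sibling sessions created via newSession().
    val sibling = spark.newSession()
    spark.sessionState.conf.setConfString("spark.sql.shuffle.partitions", "4")
    println(sibling.sessionState.conf.getConfString("spark.sql.shuffle.partitions"))

    spark.stop()
  }
}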
@@ -613,7 +623,6 @@ class SparkSession private(
    *
    * @since 2.1.0
    */
-  @InterfaceStability.Stable
   def time[T](f: => T): T = {
     val start = System.nanoTime()
     val ret = f
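
The hunk above only drops a misplaced stability annotation from time; its behavior is unchanged. For reference, it measures the wall-clock time of the given block, prints it, and returns the block's result:

// Prints something like "Time taken: 123 ms", then yields the count.
val n = spark.time {
  spark.range(0, 1000000L).count()
}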
@@ -928,9 +937,19 @@ object SparkSession {
     defaultSession.set(null)
   }
 
-  private[sql] def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)
+  /**
+   * Returns the active SparkSession for the current thread, returned by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)
 
-  private[sql] def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)
+  /**
+   * Returns the default SparkSession that is returned by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)
 
   /** A global SQL listener used for the SQL UI. */
   private[sql] val sqlListener = new AtomicReference[SQLListener]()
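
Both getters were previously private[sql]; publishing them lets applications find the session that Builder.getOrCreate() registered. A small sketch of the expected behavior, assuming a fresh JVM with no prior session:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()

// getOrCreate() records the session both as the thread-local active
// session and as the process-wide default session.
assert(SparkSession.getActiveSession.contains(spark))
assert(SparkSession.getDefaultSession.contains(spark))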
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
index 8de95fe64e..7ce9938f0d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
@@ -39,7 +39,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
 
   // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
   // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
-  val warehousePath = {
+  val warehousePath: String = {
     val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
     if (configFile != null) {
       sparkContext.hadoopConfiguration.addResource(configFile)
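
The explicit String annotation pins the public type of warehousePath, whose value SharedState resolves from hive-site.xml and the Spark conf. A sketch of steering it from the builder; the directory is an assumed example:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[*]")
  .config("spark.sql.warehouse.dir", "/tmp/demo-warehouse") // assumed path
  .getOrCreate()

// Prints the resolved warehouse location, e.g. the directory set above.
println(spark.sharedState.warehousePath)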
@@ -103,7 +103,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
   /**
    * A manager for global temporary views.
    */
-  val globalTempViewManager = {
+  val globalTempViewManager: GlobalTempViewManager = {
     // The system-preserved database should not exist in the metastore. However it's hard to
     // guarantee it for every session, because case-sensitivity differs. Here we always lowercase
     // it to make our life easier.
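
globalTempViewManager backs global temporary views; this hunk only pins its public type. Views registered through it live in the system-preserved database (global_temp by default, configurable via spark.sql.globalTempDatabase):

// Global temp views are shared across sessions of the same SparkContext
// and must be qualified by the global temp database name.
spark.range(10).createGlobalTempView("numbers")
spark.newSession().sql("SELECT count(*) FROM global_temp.numbers").show()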