author    Sean Owen <sowen@cloudera.com>  2016-11-19 09:00:11 +0000
committer Sean Owen <sowen@cloudera.com>  2016-11-19 09:00:11 +0000
commit    db9fb9baacbf8640dd37a507b7450db727c7e6ea (patch)
tree      26b59a6df66eee13d65dc438c8fa954736a85f90
parent    2a40de408b5eb47edba92f9fe92a42ed1e78bf98 (diff)
[SPARK-18448][CORE] SparkSession should implement java.lang.AutoCloseable like JavaSparkContext
## What changes were proposed in this pull request?

Just adds `close()` + `Closeable` as a synonym for `stop()`. This makes `SparkSession` usable from Java in a try-with-resources statement, as suggested by ash211 (`Closeable` extends `AutoCloseable`, for what it's worth).

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #15932 from srowen/SPARK-18448.
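Because `close()` simply delegates to `stop()`, a `SparkSession` can now be managed by a Java try-with-resources block. A minimal sketch of that usage (the app name and the `local[*]` master are illustrative assumptions, not part of this patch):

```java
import org.apache.spark.sql.SparkSession;

public class SparkSessionAutoClose {
  public static void main(String[] args) {
    // SparkSession now implements Closeable (and therefore AutoCloseable),
    // so try-with-resources calls close(), which delegates to stop().
    try (SparkSession spark = SparkSession.builder()
        .appName("spark-18448-sketch")   // hypothetical app name
        .master("local[*]")              // assumed local master, for illustration only
        .getOrCreate()) {
      spark.sql("SELECT 1 AS id").show();
    }
    // No explicit spark.stop() is needed; the session is stopped when the block exits.
  }
}
```

Before this change, the same code required an explicit `spark.stop()` in a `finally` block.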
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala  10
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 3045eb69f4..58b2ab3957 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql
import java.beans.Introspector
+import java.io.Closeable
import java.util.concurrent.atomic.AtomicReference
import scala.collection.JavaConverters._
@@ -72,7 +73,7 @@ import org.apache.spark.util.Utils
class SparkSession private(
@transient val sparkContext: SparkContext,
@transient private val existingSharedState: Option[SharedState])
- extends Serializable with Logging { self =>
+ extends Serializable with Closeable with Logging { self =>
private[sql] def this(sc: SparkContext) {
this(sc, None)
@@ -648,6 +649,13 @@ class SparkSession private(
}
/**
+ * Synonym for `stop()`.
+ *
+ * @since 2.2.0
+ */
+ override def close(): Unit = stop()
+
+ /**
* Parses the data type in our internal string representation. The data type string should
* have the same format as the one generated by `toString` in scala.
* It is only used by PySpark.