 examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala |  9 +++------
 repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala   |  4 ++--
 repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala         |  4 ++--
 sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala         | 14 ++++++++++++--
 4 files changed, 19 insertions(+), 12 deletions(-)
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala b/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
index 8ce4427c53..b4118b16e2 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
@@ -18,7 +18,6 @@
 // scalastyle:off println
 package org.apache.spark.examples.sql
 
-import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.{SaveMode, SparkSession}
 
 // One method for defining the schema of an RDD is to make a case class with the desired column
@@ -27,14 +26,12 @@ case class Record(key: Int, value: String)
 
 object RDDRelation {
   def main(args: Array[String]) {
-    val sparkConf = new SparkConf().setAppName("RDDRelation")
-    val sc = new SparkContext(sparkConf)
-    val spark = new SparkSession(sc)
+    val spark = SparkSession.builder.appName("RDDRelation").getOrCreate()
 
     // Importing the SparkSession gives access to all the SQL functions and implicit conversions.
     import spark.implicits._
 
-    val df = sc.parallelize((1 to 100).map(i => Record(i, s"val_$i"))).toDF()
+    val df = spark.createDataFrame((1 to 100).map(i => Record(i, s"val_$i")))
     // Any RDD containing case classes can be registered as a table. The schema of the table is
     // automatically inferred using scala reflection.
     df.registerTempTable("records")
@@ -70,7 +67,7 @@ object RDDRelation {
     parquetFile.registerTempTable("parquetFile")
     spark.sql("SELECT * FROM parquetFile").collect().foreach(println)
 
-    sc.stop()
+    spark.stop()
   }
 }
 // scalastyle:on println
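
The example above now does all of its setup and teardown through SparkSession: the builder replaces the manual SparkConf/SparkContext wiring, createDataFrame replaces sc.parallelize(...).toDF(), and the new stop() replaces sc.stop(). A minimal, self-contained sketch of the same pattern; the object name and the local[*] master are illustrative assumptions, not part of the patch:

    import org.apache.spark.sql.SparkSession

    // Case classes used with createDataFrame's reflection-based schema
    // inference must be defined at the top level.
    case class Record(key: Int, value: String)

    object BuilderExample {
      def main(args: Array[String]): Unit = {
        // getOrCreate() reuses an existing session (and its SparkContext) if one is running.
        val spark = SparkSession.builder
          .master("local[*]")          // illustrative; omit when submitting to a cluster
          .appName("BuilderExample")
          .getOrCreate()

        // Replaces sc.parallelize(...).toDF(): the session builds the DataFrame directly.
        val df = spark.createDataFrame((1 to 100).map(i => Record(i, s"val_$i")))
        df.registerTempTable("records")
        spark.sql("SELECT COUNT(*) FROM records").collect().foreach(println)

        spark.stop()  // the stop() method introduced by this patch
      }
    }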
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 6a811adcf9..c4f64505a2 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -1030,10 +1030,10 @@ class SparkILoop(
   def createSparkSession(): SparkSession = {
     if (SparkSession.hiveClassesArePresent) {
       logInfo("Creating Spark session with Hive support")
-      SparkSession.withHiveSupport(sparkContext)
+      SparkSession.builder.enableHiveSupport().getOrCreate()
     } else {
       logInfo("Creating Spark session")
-      new SparkSession(sparkContext)
+      SparkSession.builder.getOrCreate()
     }
   }
 
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 8e381ff6ae..a171759809 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -94,10 +94,10 @@ object Main extends Logging {
 
   def createSparkSession(): SparkSession = {
     if (SparkSession.hiveClassesArePresent) {
-      sparkSession = SparkSession.withHiveSupport(sparkContext)
+      sparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
       logInfo("Created Spark session with Hive support")
     } else {
-      sparkSession = new SparkSession(sparkContext)
+      sparkSession = SparkSession.builder.getOrCreate()
       logInfo("Created Spark session")
     }
     sparkSession
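
Both REPL entry points (Scala 2.10's SparkILoop and Scala 2.11's Main) now build their session the same way: the deprecated SparkSession.withHiveSupport(sparkContext) becomes builder.enableHiveSupport().getOrCreate(), and the no-longer-public constructor becomes builder.getOrCreate(). A REPL-style sketch of the shared pattern, with hiveSupport standing in for the SparkSession.hiveClassesArePresent check:

    import org.apache.spark.sql.SparkSession

    // hiveSupport stands in for SparkSession.hiveClassesArePresent, which guards
    // against calling enableHiveSupport() when Hive classes are not on the classpath
    // (enableHiveSupport() throws in that case).
    def createSparkSession(hiveSupport: Boolean): SparkSession = {
      val builder = SparkSession.builder
      if (hiveSupport) {
        builder.enableHiveSupport().getOrCreate()
      } else {
        builder.getOrCreate()
      }
    }

Because getOrCreate() returns the already-running session when one exists, pasting this into spark-shell simply hands back the shell's own session rather than creating a second one.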
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 3836ce2daa..aa7c335c53 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -54,6 +54,7 @@ import org.apache.spark.util.Utils
  * {{{
  *   SparkSession.builder()
  *     .master("local")
+ *     .appName("Word Count")
  *     .config("spark.some.config.option", "some-value")
  *     .getOrCreate()
  * }}}
@@ -63,7 +64,7 @@ class SparkSession private(
     @transient private val existingSharedState: Option[SharedState])
   extends Serializable with Logging { self =>
 
-  def this(sc: SparkContext) {
+  private[sql] def this(sc: SparkContext) {
     this(sc, None)
   }
 
@@ -573,7 +574,7 @@ class SparkSession private(
    * common Scala objects into [[DataFrame]]s.
    *
    * {{{
-   *   val sparkSession = new SparkSession(sc)
+   *   val sparkSession = SparkSession.builder.getOrCreate()
    *   import sparkSession.implicits._
    * }}}
    *
@@ -586,6 +587,15 @@ class SparkSession private(
   }
   // scalastyle:on
 
+  /**
+   * Stop the underlying [[SparkContext]].
+   *
+   * @since 2.0.0
+   */
+  def stop(): Unit = {
+    sparkContext.stop()
+  }
+
   protected[sql] def parseSql(sql: String): LogicalPlan = {
     sessionState.sqlParser.parsePlan(sql)
   }
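
With the SparkContext-taking constructor now private[sql], the builder is the only public way to obtain a session, and the new stop() gives it a matching shutdown call. A short usage sketch; the object name is illustrative, and a master is assumed to be supplied via spark-submit:

    import org.apache.spark.sql.SparkSession

    object StopExample {
      def main(args: Array[String]): Unit = {
        // new SparkSession(sc) no longer compiles outside org.apache.spark.sql;
        // the builder is the supported entry point.
        val spark = SparkSession.builder.appName("StopExample").getOrCreate()
        try {
          spark.sql("SELECT 1").collect().foreach(println)
        } finally {
          spark.stop()  // delegates to sparkContext.stop(), per the method added above
        }
      }
    }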