aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2016-05-31 17:40:44 -0700
committerAndrew Or <andrew@databricks.com>2016-05-31 17:40:44 -0700
commit85d6b0db9f5bd425c36482ffcb1c3b9fd0fcdb31 (patch)
tree2e09a7e6c626ec965d86b31fd3b64207be766349 /sql/hive/src
parent93e97147eb499dde1e54e07ba113eebcbe25508a (diff)
downloadspark-85d6b0db9f5bd425c36482ffcb1c3b9fd0fcdb31.tar.gz
spark-85d6b0db9f5bd425c36482ffcb1c3b9fd0fcdb31.tar.bz2
spark-85d6b0db9f5bd425c36482ffcb1c3b9fd0fcdb31.zip
[SPARK-15618][SQL][MLLIB] Use SparkSession.builder.sparkContext if applicable.
## What changes were proposed in this pull request? This PR changes function `SparkSession.builder.sparkContext(..)` from **private[sql]** into **private[spark]**, and uses it if applicable like the followings. ``` - val spark = SparkSession.builder().config(sc.getConf).getOrCreate() + val spark = SparkSession.builder().sparkContext(sc).getOrCreate() ``` ## How was this patch tested? Pass the existing Jenkins tests. Author: Dongjoon Hyun <dongjoon@apache.org> Closes #13365 from dongjoon-hyun/SPARK-15618.
Diffstat (limited to 'sql/hive/src')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala13
1 file changed, 5 insertions, 8 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index a4bbe96cf8..d56bede0cc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -31,7 +31,7 @@ import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.internal.Logging
-import org.apache.spark.sql.{QueryTest, Row, SparkSession, SQLContext}
+import org.apache.spark.sql.{QueryTest, Row, SparkSession}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, FunctionResource, JarResource}
import org.apache.spark.sql.expressions.Window
@@ -282,15 +282,12 @@ object SetWarehouseLocationTest extends Logging {
val hiveWarehouseLocation = Utils.createTempDir()
hiveWarehouseLocation.delete()
- val conf = new SparkConf()
- conf.set("spark.ui.enabled", "false")
// We will use the value of spark.sql.warehouse.dir to override the
// value of hive.metastore.warehouse.dir.
- conf.set("spark.sql.warehouse.dir", warehouseLocation.toString)
- conf.set("hive.metastore.warehouse.dir", hiveWarehouseLocation.toString)
-
- val sparkSession = SparkSession.builder
- .config(conf)
+ val sparkSession = SparkSession.builder()
+ .config("spark.ui.enabled", "false")
+ .config("spark.sql.warehouse.dir", warehouseLocation.toString)
+ .config("hive.metastore.warehouse.dir", hiveWarehouseLocation.toString)
.enableHiveSupport()
.getOrCreate()