about | summary | refs | log | tree | commit | diff
path: root/sql/hive
diff options
context:
space:
mode:
author: Reynold Xin <rxin@apache.org>, 2014-08-05 22:29:19 -0700
committer: Michael Armbrust <michael@databricks.com>, 2014-08-05 22:29:19 -0700
commit: b70bae40eb9b46766e338ee79c882f1055d28912 (patch)
tree: addae74950447d25c214d9171a71cdc01a13e7ce /sql/hive
parent: 82624e2cf747688e7208bd535e29522dce3c7194 (diff)
download: spark-b70bae40eb9b46766e338ee79c882f1055d28912.tar.gz
download: spark-b70bae40eb9b46766e338ee79c882f1055d28912.tar.bz2
download: spark-b70bae40eb9b46766e338ee79c882f1055d28912.zip
[SQL] Tighten the visibility of various SQLConf methods and renamed setter/getters
Author: Reynold Xin <rxin@apache.org>

Closes #1794 from rxin/sql-conf and squashes the following commits:

3ac11ef [Reynold Xin] getAllConfs return an immutable Map instead of an Array.
4b19d6c [Reynold Xin] Tighten the visibility of various SQLConf methods and renamed setter/getters.
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 12
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 14
3 files changed, 15 insertions, 15 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index d8e7a5943d..53f3dc11db 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -60,9 +60,9 @@ class LocalHiveContext(sc: SparkContext) extends HiveContext(sc) {
/** Sets up the system initially or after a RESET command */
protected def configure() {
- set("javax.jdo.option.ConnectionURL",
+ setConf("javax.jdo.option.ConnectionURL",
s"jdbc:derby:;databaseName=$metastorePath;create=true")
- set("hive.metastore.warehouse.dir", warehousePath)
+ setConf("hive.metastore.warehouse.dir", warehousePath)
}
configure() // Must be called before initializing the catalog below.
@@ -76,7 +76,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
self =>
// Change the default SQL dialect to HiveQL
- override private[spark] def dialect: String = get(SQLConf.DIALECT, "hiveql")
+ override private[spark] def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
override protected[sql] def executePlan(plan: LogicalPlan): this.QueryExecution =
new this.QueryExecution { val logical = plan }
@@ -224,15 +224,15 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
@transient protected[hive] lazy val hiveconf = new HiveConf(classOf[SessionState])
@transient protected[hive] lazy val sessionState = {
val ss = new SessionState(hiveconf)
- set(hiveconf.getAllProperties) // Have SQLConf pick up the initial set of HiveConf.
+ setConf(hiveconf.getAllProperties) // Have SQLConf pick up the initial set of HiveConf.
ss
}
sessionState.err = new PrintStream(outputBuffer, true, "UTF-8")
sessionState.out = new PrintStream(outputBuffer, true, "UTF-8")
- override def set(key: String, value: String): Unit = {
- super.set(key, value)
+ override def setConf(key: String, value: String): Unit = {
+ super.setConf(key, value)
runSqlHive(s"SET $key=$value")
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
index c605e8adcf..d890df866f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
@@ -65,9 +65,9 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
/** Sets up the system initially or after a RESET command */
protected def configure() {
- set("javax.jdo.option.ConnectionURL",
+ setConf("javax.jdo.option.ConnectionURL",
s"jdbc:derby:;databaseName=$metastorePath;create=true")
- set("hive.metastore.warehouse.dir", warehousePath)
+ setConf("hive.metastore.warehouse.dir", warehousePath)
}
configure() // Must be called before initializing the catalog below.
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 2f0be49b6a..fdb2f41f5a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -75,9 +75,9 @@ class HiveQuerySuite extends HiveComparisonTest {
"SELECT 2 / 1, 1 / 2, 1 / 3, 1 / COUNT(*) FROM src LIMIT 1")
test("Query expressed in SQL") {
- set("spark.sql.dialect", "sql")
+ setConf("spark.sql.dialect", "sql")
assert(sql("SELECT 1").collect() === Array(Seq(1)))
- set("spark.sql.dialect", "hiveql")
+ setConf("spark.sql.dialect", "hiveql")
}
@@ -436,18 +436,18 @@ class HiveQuerySuite extends HiveComparisonTest {
val testVal = "val0,val_1,val2.3,my_table"
sql(s"set $testKey=$testVal")
- assert(get(testKey, testVal + "_") == testVal)
+ assert(getConf(testKey, testVal + "_") == testVal)
sql("set some.property=20")
- assert(get("some.property", "0") == "20")
+ assert(getConf("some.property", "0") == "20")
sql("set some.property = 40")
- assert(get("some.property", "0") == "40")
+ assert(getConf("some.property", "0") == "40")
sql(s"set $testKey=$testVal")
- assert(get(testKey, "0") == testVal)
+ assert(getConf(testKey, "0") == testVal)
sql(s"set $testKey=")
- assert(get(testKey, "0") == "")
+ assert(getConf(testKey, "0") == "")
}
test("SET commands semantics for a HiveContext") {