author     scwf <wangfei1@huawei.com>                 2015-05-19 17:36:00 -0700
committer  Michael Armbrust <michael@databricks.com>  2015-05-19 17:36:00 -0700
commit     60336e3bc02a2587fdf315f9011bbe7c9d3a58c4 (patch)
tree       8ad1577bbb2eb1177eb196727844496c7066d11f /sql
parent     3860520633770cc5719b2cdebe6dc3608798386d (diff)
[SPARK-7656] [SQL] use CatalystConf in FunctionRegistry
Follow-up for #5806.

Author: scwf <wangfei1@huawei.com>

Closes #6164 from scwf/FunctionRegistry and squashes the following commits:

15e6697 [scwf] use CatalystConf in FunctionRegistry
Diffstat (limited to 'sql')
 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala | 12 +++++++-----
 sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala                             |  2 +-
 sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala                       |  2 +-
 3 files changed, 9 insertions(+), 7 deletions(-)
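
The core of the patch is an API shape change: SimpleFunctionRegistry (and the FunctionRegistry trait) now take the whole CatalystConf rather than a bare caseSensitive flag, and derive case sensitivity from conf.caseSensitiveAnalysis. A minimal before/after sketch in Scala; SimpleCatalystConf is assumed here purely as a convenient concrete CatalystConf for illustration, while the registry names come from the diff below:

    import org.apache.spark.sql.catalyst.{CatalystConf, SimpleCatalystConf}
    import org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry

    // Before this patch: case sensitivity was passed in directly as a Boolean.
    // val registry = new SimpleFunctionRegistry(true)

    // After this patch: the registry is built from the Catalyst configuration
    // and reads case sensitivity from conf.caseSensitiveAnalysis.
    val conf: CatalystConf = SimpleCatalystConf(caseSensitiveAnalysis = true)
    val registry = new SimpleFunctionRegistry(conf)
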
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 16ca5bcd57..0849faa9bf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -17,6 +17,7 @@
package org.apache.spark.sql.catalyst.analysis
+import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.catalyst.expressions.Expression
import scala.collection.mutable
@@ -28,12 +29,12 @@ trait FunctionRegistry {
def lookupFunction(name: String, children: Seq[Expression]): Expression
- def caseSensitive: Boolean
+ def conf: CatalystConf
}
trait OverrideFunctionRegistry extends FunctionRegistry {
- val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive)
+ val functionBuilders = StringKeyHashMap[FunctionBuilder](conf.caseSensitiveAnalysis)
override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
functionBuilders.put(name, builder)
@@ -44,8 +45,9 @@ trait OverrideFunctionRegistry extends FunctionRegistry {
}
}
-class SimpleFunctionRegistry(val caseSensitive: Boolean) extends FunctionRegistry {
- val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive)
+class SimpleFunctionRegistry(val conf: CatalystConf) extends FunctionRegistry {
+
+ val functionBuilders = StringKeyHashMap[FunctionBuilder](conf.caseSensitiveAnalysis)
override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
functionBuilders.put(name, builder)
@@ -69,7 +71,7 @@ object EmptyFunctionRegistry extends FunctionRegistry {
throw new UnsupportedOperationException
}
- override def caseSensitive: Boolean = throw new UnsupportedOperationException
+ override def conf: CatalystConf = throw new UnsupportedOperationException
}
/**
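
The functionBuilders table is a StringKeyHashMap keyed by function name, and conf.caseSensitiveAnalysis decides whether lookups fold the key's case. A rough, self-contained sketch of that idea (illustrative only, not Spark's StringKeyHashMap; lower-casing is the assumed normalization):

    import scala.collection.mutable

    // Illustrative key-normalizing map: when caseSensitive is false, keys are
    // folded to lower case on both put and get, which is what a registry built
    // with conf.caseSensitiveAnalysis = false needs.
    class NormalizedKeyMap[V](caseSensitive: Boolean) {
      private val underlying = mutable.HashMap.empty[String, V]
      private def normalize(key: String): String =
        if (caseSensitive) key else key.toLowerCase
      def put(key: String, value: V): Unit = underlying(normalize(key)) = value
      def get(key: String): Option[V] = underlying.get(normalize(key))
    }

    val builders = new NormalizedKeyMap[Int](caseSensitive = false)
    builders.put("substr", 1)
    assert(builders.get("SUBSTR") == Some(1)) // lookup succeeds despite different casing
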
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 316ef7d588..304e958192 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -121,7 +121,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
// TODO how to handle the temp function per user session?
@transient
- protected[sql] lazy val functionRegistry: FunctionRegistry = new SimpleFunctionRegistry(true)
+ protected[sql] lazy val functionRegistry: FunctionRegistry = new SimpleFunctionRegistry(conf)
@transient
protected[sql] lazy val analyzer: Analyzer =
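
With this change the registry in SQLContext is driven by the same SQLConf that drives the analyzer, so function-name matching follows the session's case-sensitivity setting. A hedged usage sketch, assuming spark.sql.caseSensitive is the setting backing caseSensitiveAnalysis and sc is an existing SparkContext:

    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    // Assumed setting name; it toggles caseSensitiveAnalysis in SQLConf.
    sqlContext.setConf("spark.sql.caseSensitive", "false")
    sqlContext.udf.register("myUpper", (s: String) => s.toUpperCase)
    // With case-insensitive analysis, the differently cased name still resolves.
    sqlContext.sql("SELECT MYUPPER('spark')").show()
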
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 2733ebdb95..863a5db1bf 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -357,7 +357,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
@transient
override protected[sql] lazy val functionRegistry =
new HiveFunctionRegistry with OverrideFunctionRegistry {
- def caseSensitive: Boolean = false
+ override def conf: CatalystConf = currentSession().conf
}
/* An analyzer that uses the Hive metastore. */
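
The HiveContext registry relies on Scala's stackable-trait pattern: OverrideFunctionRegistry's abstract override consults its own table first and falls back to super (the HiveFunctionRegistry), and after this patch the mixed-in conf simply delegates to the current session. A generic, self-contained sketch of that pattern (names here are illustrative, not Spark's):

    // An "override" layer stacked on a base registry: lookups try the local
    // table first and fall back to the underlying registry via super.
    trait Registry { def lookup(name: String): String }

    class BaseRegistry extends Registry {
      def lookup(name: String): String = s"base:$name"
    }

    trait OverrideLayer extends Registry {
      private val local = scala.collection.mutable.Map.empty[String, String]
      def register(name: String, impl: String): Unit = local(name) = impl
      abstract override def lookup(name: String): String =
        local.getOrElse(name, super.lookup(name))
    }

    val registry = new BaseRegistry with OverrideLayer
    registry.register("upper", "local:upper")
    assert(registry.lookup("upper")  == "local:upper") // served by the override layer
    assert(registry.lookup("concat") == "base:concat") // falls back to the base registry
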