aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorCheng Hao <hao.cheng@intel.com>2015-07-08 00:10:24 -0700
committerReynold Xin <rxin@databricks.com>2015-07-08 00:10:24 -0700
commit351a36d0c54d2f995df956ffb0a4236e12f89aad (patch)
treee47f7be97705d0270d1e1eaf37af92862240cf4d /sql
parent08192a1b8a375f29fbd852bbaed3a50021737475 (diff)
downloadspark-351a36d0c54d2f995df956ffb0a4236e12f89aad.tar.gz
spark-351a36d0c54d2f995df956ffb0a4236e12f89aad.tar.bz2
spark-351a36d0c54d2f995df956ffb0a4236e12f89aad.zip
[SPARK-8883][SQL]Remove the OverrideFunctionRegistry
Remove the `OverrideFunctionRegistry` from Spark SQL, as the subclasses of `FunctionRegistry` have their own way to delegate to the right underlying `FunctionRegistry`. Author: Cheng Hao <hao.cheng@intel.com> Closes #7260 from chenghao-intel/override and squashes the following commits: 164d093 [Cheng Hao] enable the function registry 2ca8459 [Cheng Hao] remove the OverrideFunctionRegistry
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala | 13
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 3
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 2
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala | 2
4 files changed, 3 insertions, 17 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index fef2763530..5c25181e1c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -35,19 +35,6 @@ trait FunctionRegistry {
def lookupFunction(name: String, children: Seq[Expression]): Expression
}
-class OverrideFunctionRegistry(underlying: FunctionRegistry) extends FunctionRegistry {
-
- private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
-
- override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
- functionBuilders.put(name, builder)
- }
-
- override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
- functionBuilders.get(name).map(_(children)).getOrElse(underlying.lookupFunction(name, children))
- }
-}
-
class SimpleFunctionRegistry extends FunctionRegistry {
private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index e81371e7b0..079f31ab8f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -139,8 +139,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
// TODO how to handle the temp function per user session?
@transient
- protected[sql] lazy val functionRegistry: FunctionRegistry =
- new OverrideFunctionRegistry(FunctionRegistry.builtin)
+ protected[sql] lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin
@transient
protected[sql] lazy val analyzer: Analyzer =
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index b91242af2d..439d8cab5f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -371,7 +371,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
// Note that HiveUDFs will be overridden by functions registered in this context.
@transient
override protected[sql] lazy val functionRegistry: FunctionRegistry =
- new OverrideFunctionRegistry(new HiveFunctionRegistry(FunctionRegistry.builtin))
+ new HiveFunctionRegistry(FunctionRegistry.builtin)
/* An analyzer that uses the Hive metastore. */
@transient
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 1deef6b37e..0bc8adb16a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -77,7 +77,7 @@ private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
}
override def registerFunction(name: String, builder: FunctionBuilder): Unit =
- throw new UnsupportedOperationException
+ underlying.registerFunction(name, builder)
}
private[hive] case class HiveSimpleUDF(funcWrapper: HiveFunctionWrapper, children: Seq[Expression])