-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala  13
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala  3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala  2
4 files changed, 3 insertions, 17 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index fef2763530..5c25181e1c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -35,19 +35,6 @@ trait FunctionRegistry {
   def lookupFunction(name: String, children: Seq[Expression]): Expression
 }
 
-class OverrideFunctionRegistry(underlying: FunctionRegistry) extends FunctionRegistry {
-
-  private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
-
-  override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
-    functionBuilders.put(name, builder)
-  }
-
-  override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
-    functionBuilders.get(name).map(_(children)).getOrElse(underlying.lookupFunction(name, children))
-  }
-}
-
 class SimpleFunctionRegistry extends FunctionRegistry {
 
   private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
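
For context, a minimal sketch (not part of this patch) of how a function is registered and resolved once the OverrideFunctionRegistry overlay is gone: callers go directly to FunctionRegistry.builtin, the SimpleFunctionRegistry kept above. The object name BuiltinRegistrySketch and the function name "always_one" are purely illustrative.

// Illustrative sketch only; assumes the Catalyst classes shown in this hunk.
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}

object BuiltinRegistrySketch {
  def main(args: Array[String]): Unit = {
    val registry = FunctionRegistry.builtin
    // "always_one" is a hypothetical name; the builder ignores its arguments.
    registry.registerFunction("always_one", (_: Seq[Expression]) => Literal(1))
    val resolved = registry.lookupFunction("always_one", Seq.empty)
    println(resolved)  // prints the resolved Literal(1) expression
  }
}
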
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index e81371e7b0..079f31ab8f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -139,8 +139,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   // TODO how to handle the temp function per user session?
   @transient
-  protected[sql] lazy val functionRegistry: FunctionRegistry =
-    new OverrideFunctionRegistry(FunctionRegistry.builtin)
+  protected[sql] lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin
 
   @transient
   protected[sql] lazy val analyzer: Analyzer =
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index b91242af2d..439d8cab5f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -371,7 +371,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   // Note that HiveUDFs will be overridden by functions registered in this context.
   @transient
   override protected[sql] lazy val functionRegistry: FunctionRegistry =
-    new OverrideFunctionRegistry(new HiveFunctionRegistry(FunctionRegistry.builtin))
+    new HiveFunctionRegistry(FunctionRegistry.builtin)
 
   /* An analyzer that uses the Hive metastore. */
   @transient
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 1deef6b37e..0bc8adb16a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -77,7 +77,7 @@ private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
   }
 
   override def registerFunction(name: String, builder: FunctionBuilder): Unit =
-    throw new UnsupportedOperationException
+    underlying.registerFunction(name, builder)
 }
 
 private[hive] case class HiveSimpleUDF(funcWrapper: HiveFunctionWrapper, children: Seq[Expression])
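
Taken together with the HiveContext change above, registration now flows through HiveFunctionRegistry into the underlying builtin registry, and, as the comment in HiveContext.scala notes, functions registered in the context override Hive UDFs, i.e. the builtin registry is consulted before falling back to Hive during lookup. The standalone sketch below (all names and simplified types are stand-ins, not code from this patch) illustrates that delegation pattern.

// Standalone, runnable sketch of the delegation pattern; the real classes live
// in Catalyst and the Hive module, these stand-ins only model their behavior.
object DelegationSketch {
  type Expression = String                          // stand-in for Catalyst's Expression
  type FunctionBuilder = Seq[Expression] => Expression

  trait FunctionRegistry {
    def registerFunction(name: String, builder: FunctionBuilder): Unit
    def lookupFunction(name: String, children: Seq[Expression]): Expression
  }

  // Plays the role of the builtin SimpleFunctionRegistry.
  class SimpleRegistry extends FunctionRegistry {
    private val builders = scala.collection.mutable.Map.empty[String, FunctionBuilder]
    def registerFunction(name: String, builder: FunctionBuilder): Unit =
      builders(name.toLowerCase) = builder
    def lookupFunction(name: String, children: Seq[Expression]): Expression =
      builders.get(name.toLowerCase).map(_(children))
        .getOrElse(sys.error(s"undefined function $name"))
  }

  // Plays the role of HiveFunctionRegistry after this patch: registration is
  // delegated to the underlying registry; lookup tries it first and only then
  // falls back to a "Hive" UDF, so registered functions win over Hive UDFs.
  class HiveStyleRegistry(underlying: FunctionRegistry) extends FunctionRegistry {
    def registerFunction(name: String, builder: FunctionBuilder): Unit =
      underlying.registerFunction(name, builder)
    def lookupFunction(name: String, children: Seq[Expression]): Expression =
      scala.util.Try(underlying.lookupFunction(name, children))
        .getOrElse(s"HiveUDF($name)")
  }

  def main(args: Array[String]): Unit = {
    val registry = new HiveStyleRegistry(new SimpleRegistry)
    println(registry.lookupFunction("upper", Seq("'a'")))   // HiveUDF(upper)
    registry.registerFunction("upper", children => s"Upper(${children.mkString(", ")})")
    println(registry.lookupFunction("upper", Seq("'a'")))   // Upper('a')
  }
}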