aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
author: Cheng Lian <lian@databricks.com> 2015-11-12 12:17:51 -0800
committer: Michael Armbrust <michael@databricks.com> 2015-11-12 12:17:51 -0800
commit4fe99c72c60646b1372bb2c089c6fc7c4fa11644 (patch)
treea3101600fe5ba7732c90bbd12c826d8bd12aea9a /sql/hive
parentdf0e318152165c8e50793aff13aaca5d2d9b8b9d (diff)
downloadspark-4fe99c72c60646b1372bb2c089c6fc7c4fa11644.tar.gz
spark-4fe99c72c60646b1372bb2c089c6fc7c4fa11644.tar.bz2
spark-4fe99c72c60646b1372bb2c089c6fc7c4fa11644.zip
[SPARK-11191][SQL] Looks up temporary function using execution Hive client
When looking up Hive temporary functions, we should always use the `SessionState` within the execution Hive client, since temporary functions are registered there. Author: Cheng Lian <lian@databricks.com> Closes #9664 from liancheng/spark-11191.fix-temp-function.
Diffstat (limited to 'sql/hive')
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 2
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala | 14
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index ba6204633b..0c473799cc 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -454,7 +454,7 @@ class HiveContext private[hive](
// Note that HiveUDFs will be overridden by functions registered in this context.
@transient
override protected[sql] lazy val functionRegistry: FunctionRegistry =
- new HiveFunctionRegistry(FunctionRegistry.builtin.copy()) {
+ new HiveFunctionRegistry(FunctionRegistry.builtin.copy(), this) {
override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
// Hive Registry need current database to lookup function
// TODO: the current database of executionHive should be consistent with metadataHive
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index a9db70119d..e6fe2ad5f2 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -46,17 +46,23 @@ import org.apache.spark.sql.hive.HiveShim._
import org.apache.spark.sql.types._
-private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
+private[hive] class HiveFunctionRegistry(
+ underlying: analysis.FunctionRegistry,
+ hiveContext: HiveContext)
extends analysis.FunctionRegistry with HiveInspectors {
- def getFunctionInfo(name: String): FunctionInfo = FunctionRegistry.getFunctionInfo(name)
+ def getFunctionInfo(name: String): FunctionInfo = {
+ hiveContext.executionHive.withHiveState {
+ FunctionRegistry.getFunctionInfo(name)
+ }
+ }
override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
Try(underlying.lookupFunction(name, children)).getOrElse {
// We only look it up to see if it exists, but do not include it in the HiveUDF since it is
// not always serializable.
val functionInfo: FunctionInfo =
- Option(FunctionRegistry.getFunctionInfo(name.toLowerCase)).getOrElse(
+ Option(getFunctionInfo(name.toLowerCase)).getOrElse(
throw new AnalysisException(s"undefined function $name"))
val functionClassName = functionInfo.getFunctionClass.getName
@@ -110,7 +116,7 @@ private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
override def lookupFunction(name: String): Option[ExpressionInfo] = {
underlying.lookupFunction(name).orElse(
Try {
- val info = FunctionRegistry.getFunctionInfo(name)
+ val info = getFunctionInfo(name)
val annotation = info.getFunctionClass.getAnnotation(classOf[Description])
if (annotation != null) {
Some(new ExpressionInfo(