author     Andrew Or <andrew@databricks.com>    2016-03-28 16:45:02 -0700
committer  Andrew Or <andrew@databricks.com>    2016-03-28 16:45:02 -0700
commit     27aab80695cfcf0c0ecf1e98a5a862a8123213a1 (patch)
tree       12d99ef63b8382f3c825d8a32825a97bf66c5d23 /sql/hive
parent     2f98ee67dff0be38a4c92d7d29c8cc8ea8b6576e (diff)
[SPARK-14013][SQL] Proper temp function support in catalog
## What changes were proposed in this pull request?

Session catalog was added in #11750. However, it doesn't really support temporary functions properly; right now we only store the metadata in the form of `CatalogFunction`, but this doesn't make sense for temporary functions because there is no class name. This patch moves the `FunctionRegistry` into the `SessionCatalog`. With this, the user can call `catalog.createTempFunction` and `catalog.lookupFunction` to use the function they registered previously. This is currently still dead code, however.

## How was this patch tested?

`SessionCatalogSuite`.

Author: Andrew Or <andrew@databricks.com>

Closes #11972 from andrewor14/temp-functions.
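For illustration, a minimal sketch of how the new catalog-level API is meant to be used. This is not code from the patch: the `new SessionCatalog(new InMemoryCatalog)` convenience constructor, the `ignoreIfExists` flag, and the builder shape (`Seq[Expression] => Expression`) are assumptions based on the `FunctionRegistry` interface around this commit.

```scala
import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression, Upper}
import org.apache.spark.sql.types.StringType

// Assumed test-only constructor: a SessionCatalog backed by an in-memory
// external catalog and a fresh function registry.
val catalog = new SessionCatalog(new InMemoryCatalog)

// A temp function is just a name bound to a builder (Seq[Expression] => Expression);
// unlike a CatalogFunction, there is no class name and no metastore entry.
catalog.createTempFunction(
  "my_upper", (args: Seq[Expression]) => Upper(args.head), ignoreIfExists = false)

// Resolution now goes through the catalog instead of a standalone registry.
val arg = AttributeReference("s", StringType)()
val resolved: Expression = catalog.lookupFunction("my_upper", Seq(arg))
```

As the commit message notes, nothing in the planner calls these paths yet; `SessionCatalogSuite` exercises them directly.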
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala   4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala    14
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala            10
3 files changed, 20 insertions(+), 8 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index ec7bf61be1..ff12245e8d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.hive
 
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
 import org.apache.spark.sql.catalyst.catalog.SessionCatalog
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.rules.Rule
@@ -31,8 +32,9 @@ class HiveSessionCatalog(
     externalCatalog: HiveExternalCatalog,
     client: HiveClient,
     context: HiveContext,
+    functionRegistry: FunctionRegistry,
     conf: SQLConf)
-  extends SessionCatalog(externalCatalog, conf) {
+  extends SessionCatalog(externalCatalog, functionRegistry, conf) {
 
   override def setCurrentDatabase(db: String): Unit = {
     super.setCurrentDatabase(db)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index caa7f296ed..c9b6b1dfb6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -35,13 +35,6 @@ private[hive] class HiveSessionState(ctx: HiveContext) extends SessionState(ctx)
   }
 
   /**
-   * Internal catalog for managing table and database states.
-   */
-  override lazy val catalog = {
-    new HiveSessionCatalog(ctx.hiveCatalog, ctx.metadataHive, ctx, conf)
-  }
-
-  /**
    * Internal catalog for managing functions registered by the user.
    * Note that HiveUDFs will be overridden by functions registered in this context.
    */
@@ -50,6 +43,13 @@ private[hive] class HiveSessionState(ctx: HiveContext) extends SessionState(ctx)
   }
 
   /**
+   * Internal catalog for managing table and database states.
+   */
+  override lazy val catalog = {
+    new HiveSessionCatalog(ctx.hiveCatalog, ctx.metadataHive, ctx, functionRegistry, conf)
+  }
+
+  /**
    * An analyzer that uses the Hive metastore.
    */
   override lazy val analyzer: Analyzer = {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index efaa052370..c07c428895 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -141,6 +141,16 @@ private[hive] class HiveFunctionRegistry(
       }
     }.getOrElse(None))
   }
+
+  override def lookupFunctionBuilder(name: String): Option[FunctionBuilder] = {
+    underlying.lookupFunctionBuilder(name)
+  }
+
+  // Note: This does not drop functions stored in the metastore
+  override def dropFunction(name: String): Boolean = {
+    underlying.dropFunction(name)
+  }
+
 }
 
 private[hive] case class HiveSimpleUDF(
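The hunk above follows a two-level registry pattern: `HiveFunctionRegistry` wraps an `underlying` registry and forwards builder lookups and drops to it, with Hive's own UDF resolution as the fallback. A generic sketch of that wrapper shape, using hypothetical class names rather than the actual Spark types:

```scala
// Illustrative sketch of the two-level registry pattern; these are
// hypothetical classes, not the actual Spark types.
trait Registry[F] {
  def lookup(name: String): Option[F]
  def drop(name: String): Boolean
}

class SimpleRegistry[F] extends Registry[F] {
  private val fns = scala.collection.mutable.Map.empty[String, F]
  def register(name: String, f: F): Unit = fns(name) = f
  override def lookup(name: String): Option[F] = fns.get(name)
  override def drop(name: String): Boolean = fns.remove(name).isDefined
}

// Consults the in-memory registry first, then an external resolver (for
// HiveFunctionRegistry, Hive's own UDF lookup). drop only removes the
// in-memory entry, mirroring the "does not drop functions stored in the
// metastore" note in the patch.
class FallbackRegistry[F](underlying: Registry[F], fallback: String => Option[F])
  extends Registry[F] {
  override def lookup(name: String): Option[F] =
    underlying.lookup(name).orElse(fallback(name))
  override def drop(name: String): Boolean = underlying.drop(name)
}
```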