path: root/sql
author    wangyang <wangyang@haizhi.com>    2016-05-24 11:03:12 -0700
committer Andrew Or <andrew@databricks.com> 2016-05-24 11:03:12 -0700
commit    784cc07d1675eb9e0a387673cf86874e1bfc10f9 (patch)
tree      3d8c8d953b21d9120981065d82f7db4ffa90d584 /sql
parent    a313a5ae74ae4e7686283657ba56076222317595 (diff)
[SPARK-15388][SQL] Fix spark sql CREATE FUNCTION with hive 1.2.1
## What changes were proposed in this pull request?

spark.sql("CREATE FUNCTION myfunc AS 'com.haizhi.bdp.udf.UDFGetGeoCode'") throws "org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:NoSuchObjectException(message:Function default.myfunc does not exist))" with Hive 1.2.1. This appears to have been introduced by PR #12853. With Hive 1.2.1 the NoSuchObjectException text is embedded in the wrapper exception's message rather than exposed as a typed cause, so the fix is to catch any non-fatal exception (not just a NoSuchObjectException cause) and match on the message string.

## How was this patch tested?

Added a unit test and also tested manually.

Author: wangyang <wangyang@haizhi.com>

Closes #13177 from wangyang1992/fixCreateFunc2.
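To make the failure mode concrete, here is a minimal, self-contained sketch (not part of the patch; the hand-built exception below is an assumed stand-in for Hive 1.2.1's wrapping) showing why a type-based cause match misses while the message walk succeeds:

```scala
// Stand-in for Hive 1.2.1's failure: the NoSuchObjectException appears only
// as text inside the wrapper's message, not as a typed cause, so a pattern
// like `case CausedBy(ex: NoSuchObjectException)` never fires.
val hiveError = new RuntimeException(
  "MetaException(message:NoSuchObjectException(message:Function default.myfunc does not exist))")

// Walk the cause chain matching on message text, mirroring the patch
// (a null guard is added here defensively; getMessage may return null).
def isCausedBy(e: Throwable, fragment: String): Boolean =
  if (e.getMessage != null && e.getMessage.contains(fragment)) true
  else if (e.getCause != null) isCausedBy(e.getCause, fragment)
  else false

assert(isCausedBy(hiveError, "myfunc does not exist")) // the string match succeeds
```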
Diffstat (limited to 'sql')
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala      | 14
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala |  1
2 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
index 78713c3f0b..9df4a26d55 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
@@ -24,6 +24,7 @@ import java.util.{ArrayList => JArrayList, List => JList, Map => JMap, Set => JSet}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
+import scala.util.control.NonFatal
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.hive.conf.HiveConf
@@ -42,7 +43,6 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, CatalogTablePartition, FunctionResource, FunctionResourceType}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{IntegralType, StringType}
-import org.apache.spark.util.CausedBy
/**
@@ -480,11 +480,21 @@ private[client] class Shim_v0_13 extends Shim_v0_12 {
try {
Option(hive.getFunction(db, name)).map(fromHiveFunction)
} catch {
- case CausedBy(ex: NoSuchObjectException) if ex.getMessage.contains(name) =>
+ case NonFatal(e) if isCausedBy(e, s"$name does not exist") =>
None
}
}
+  private def isCausedBy(e: Throwable, matchMessage: String): Boolean = {
+    if (e.getMessage != null && e.getMessage.contains(matchMessage)) {
+      true
+    } else if (e.getCause != null) {
+      isCausedBy(e.getCause, matchMessage)
+    } else {
+      false
+    }
+  }
+
override def listFunctions(hive: Hive, db: String, pattern: String): Seq[String] = {
hive.getFunctions(db, pattern).asScala
}
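Separate from the diff, a hedged caller-side sketch of the resulting contract (getFunctionOption below is a hypothetical stand-in for the patched shim method, reduced to its observable behavior): a missing function surfaces as None, so no Hive exception type leaks to callers.

```scala
// Hypothetical stand-in for the patched Shim_v0_13.getFunctionOption: a
// "does not exist" failure from the metastore now resolves to None.
def getFunctionOption(db: String, name: String): Option[String] =
  None // a real shim calls hive.getFunction and maps the failure to None

getFunctionOption("default", "the_func_not_exists") match {
  case Some(f) => println(s"resolved: $f")
  case None    => println("function not found") // caller handles absence, not HiveException
}
```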
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index d46c4e7b2b..8ae4535f4e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -440,6 +440,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
assert(client.getFunctionOption("default", "func2").isEmpty)
} else {
assert(client.getFunctionOption("default", "func2").isDefined)
+ assert(client.getFunctionOption("default", "the_func_not_exists").isEmpty)
}
}