about · summary · refs · log · tree · commit · diff
diff options
context:
space:
mode:
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala   1
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala                               11
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala                         9
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala                            8
4 files changed, 18 insertions, 11 deletions
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index ff8ca01506..5903b9e71c 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -41,6 +41,7 @@ import org.apache.thrift.transport.TSocket
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkFunSuite}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index 3dce86c480..f1c2489b38 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive.client
import java.io.{File, PrintStream}
import java.util.{Map => JMap}
-import javax.annotation.concurrent.GuardedBy
import scala.collection.JavaConverters._
import scala.language.reflectiveCalls
@@ -548,7 +547,15 @@ private[hive] class ClientWrapper(
}
def addJar(path: String): Unit = {
- clientLoader.addJar(path)
+ val uri = new Path(path).toUri
+ val jarURL = if (uri.getScheme == null) {
+ // `path` is a local file path without a URL scheme
+ new File(path).toURI.toURL
+ } else {
+ // `path` is a URL with a scheme
+ uri.toURL
+ }
+ clientLoader.addJar(jarURL)
runSqlHive(s"ADD JAR $path")
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
index f99c3ed2ae..e041e0d8e5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
@@ -22,7 +22,6 @@ import java.lang.reflect.InvocationTargetException
import java.net.{URL, URLClassLoader}
import java.util
-import scala.collection.mutable
import scala.language.reflectiveCalls
import scala.util.Try
@@ -30,10 +29,9 @@ import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.spark.Logging
import org.apache.spark.deploy.SparkSubmitUtils
-import org.apache.spark.util.{MutableURLClassLoader, Utils}
-
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.util.{MutableURLClassLoader, Utils}
/** Factory for `IsolatedClientLoader` with specific versions of hive. */
private[hive] object IsolatedClientLoader {
@@ -190,9 +188,8 @@ private[hive] class IsolatedClientLoader(
new NonClosableMutableURLClassLoader(isolatedClassLoader)
}
- private[hive] def addJar(path: String): Unit = synchronized {
- val jarURL = new java.io.File(path).toURI.toURL
- classLoader.addURL(jarURL)
+ private[hive] def addJar(path: URL): Unit = synchronized {
+ classLoader.addURL(path)
}
/** The isolated client interface to Hive. */
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index fc72e3c7dc..78378c8b69 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -927,7 +927,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
test("SPARK-2263: Insert Map<K, V> values") {
sql("CREATE TABLE m(value MAP<INT, STRING>)")
sql("INSERT OVERWRITE TABLE m SELECT MAP(key, value) FROM src LIMIT 10")
- sql("SELECT * FROM m").collect().zip(sql("SELECT * FROM src LIMIT 10").collect()).map {
+ sql("SELECT * FROM m").collect().zip(sql("SELECT * FROM src LIMIT 10").collect()).foreach {
case (Row(map: Map[_, _]), Row(key: Int, value: String)) =>
assert(map.size === 1)
assert(map.head === (key, value))
@@ -961,10 +961,12 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
test("CREATE TEMPORARY FUNCTION") {
val funcJar = TestHive.getHiveFile("TestUDTF.jar").getCanonicalPath
- sql(s"ADD JAR $funcJar")
+ val jarURL = s"file://$funcJar"
+ sql(s"ADD JAR $jarURL")
sql(
"""CREATE TEMPORARY FUNCTION udtf_count2 AS
- | 'org.apache.spark.sql.hive.execution.GenericUDTFCount2'""".stripMargin)
+ |'org.apache.spark.sql.hive.execution.GenericUDTFCount2'
+ """.stripMargin)
assert(sql("DESCRIBE FUNCTION udtf_count2").count > 1)
sql("DROP TEMPORARY FUNCTION udtf_count2")
}