aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src/test
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-12-26 11:27:56 -0800
committergatorsmile <gatorsmile@gmail.com>2016-12-26 11:27:56 -0800
commitdd724c84c830d30385712d72b65e2a76a2ade700 (patch)
tree4b3e7fa79b8886d1b01c3c90a0b2f7419415de85 /sql/hive/src/test
parent8a7db8a608a9e27b10f205cc1b4ed5f2c3e83799 (diff)
downloadspark-dd724c84c830d30385712d72b65e2a76a2ade700.tar.gz
spark-dd724c84c830d30385712d72b65e2a76a2ade700.tar.bz2
spark-dd724c84c830d30385712d72b65e2a76a2ade700.zip
[SPARK-18989][SQL] DESC TABLE should not fail with format class not found
## What changes were proposed in this pull request? When we describe a table, we only want to see the information of this table, not read it, so it is OK even if the format class is not present on the classpath. ## How was this patch tested? A new regression test. Author: Wenchen Fan <wenchen@databricks.com> Closes #16388 from cloud-fan/hive.
Diffstat (limited to 'sql/hive/src/test')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala46
1 file changed, 46 insertions, 0 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index a670560c59..9aa9ebf1aa 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -311,6 +311,29 @@ class HiveSparkSubmitSuite
runSparkSubmit(args)
}
+ test("SPARK-18989: DESC TABLE should not fail with format class not found") {
+ val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+
+ val argsForCreateTable = Seq(
+ "--class", SPARK_18989_CREATE_TABLE.getClass.getName.stripSuffix("$"),
+ "--name", "SPARK-18947",
+ "--master", "local-cluster[2,1,1024]",
+ "--conf", "spark.ui.enabled=false",
+ "--conf", "spark.master.rest.enabled=false",
+ "--jars", TestHive.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath,
+ unusedJar.toString)
+ runSparkSubmit(argsForCreateTable)
+
+ val argsForShowTables = Seq(
+ "--class", SPARK_18989_DESC_TABLE.getClass.getName.stripSuffix("$"),
+ "--name", "SPARK-18947",
+ "--master", "local-cluster[2,1,1024]",
+ "--conf", "spark.ui.enabled=false",
+ "--conf", "spark.master.rest.enabled=false",
+ unusedJar.toString)
+ runSparkSubmit(argsForShowTables)
+ }
+
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
// This is copied from org.apache.spark.deploy.SparkSubmitSuite
private def runSparkSubmit(args: Seq[String]): Unit = {
@@ -853,3 +876,26 @@ object SPARK_18360 {
}
}
}
+
+object SPARK_18989_CREATE_TABLE {
+ def main(args: Array[String]): Unit = {
+ val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
+ spark.sql(
+ """
+ |CREATE TABLE IF NOT EXISTS base64_tbl(val string) STORED AS
+ |INPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'
+ |OUTPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat'
+ """.stripMargin)
+ }
+}
+
+object SPARK_18989_DESC_TABLE {
+ def main(args: Array[String]): Unit = {
+ val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
+ try {
+ spark.sql("DESC base64_tbl")
+ } finally {
+ spark.sql("DROP TABLE IF EXISTS base64_tbl")
+ }
+ }
+}