aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
author: scwf <wangfei1@huawei.com> 2014-10-05 17:47:20 -0700
committer: Michael Armbrust <michael@databricks.com> 2014-10-05 17:49:41 -0700
commit: 58f5361caaa2f898e38ae4b3794167881e20a818 (patch)
tree: dbe6b16951c274fd5bb6486f257f717e585abe63 /sql
parent: 79b2108de30bf91c8e58bb36405d334aeb2a00ad (diff)
download: spark-58f5361caaa2f898e38ae4b3794167881e20a818.tar.gz
spark-58f5361caaa2f898e38ae4b3794167881e20a818.tar.bz2
spark-58f5361caaa2f898e38ae4b3794167881e20a818.zip
[SPARK-3792][SQL] Enable JavaHiveQLSuite
Do not use TestSQLContext in JavaHiveQLSuite, that may lead to two SparkContexts in one jvm and enable JavaHiveQLSuite Author: scwf <wangfei1@huawei.com> Closes #2652 from scwf/fix-JavaHiveQLSuite and squashes the following commits: be35c91 [scwf] enable JavaHiveQLSuite
Diffstat (limited to 'sql')
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala | 27
1 file changed, 9 insertions(+), 18 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
index 9644b707eb..46b11b582b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
@@ -25,34 +25,30 @@ import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.api.java.JavaSchemaRDD
import org.apache.spark.sql.execution.ExplainCommand
import org.apache.spark.sql.hive.test.TestHive
-import org.apache.spark.sql.test.TestSQLContext
// Implicits
import scala.collection.JavaConversions._
class JavaHiveQLSuite extends FunSuite {
- lazy val javaCtx = new JavaSparkContext(TestSQLContext.sparkContext)
+ lazy val javaCtx = new JavaSparkContext(TestHive.sparkContext)
// There is a little trickery here to avoid instantiating two HiveContexts in the same JVM
lazy val javaHiveCtx = new JavaHiveContext(javaCtx) {
override val sqlContext = TestHive
}
- ignore("SELECT * FROM src") {
+ test("SELECT * FROM src") {
assert(
javaHiveCtx.sql("SELECT * FROM src").collect().map(_.getInt(0)) ===
TestHive.sql("SELECT * FROM src").collect().map(_.getInt(0)).toSeq)
}
- private val explainCommandClassName =
- classOf[ExplainCommand].getSimpleName.stripSuffix("$")
-
def isExplanation(result: JavaSchemaRDD) = {
val explanation = result.collect().map(_.getString(0))
- explanation.size > 1 && explanation.head.startsWith(explainCommandClassName)
+ explanation.size > 1 && explanation.head.startsWith("== Physical Plan ==")
}
- ignore("Query Hive native command execution result") {
+ test("Query Hive native command execution result") {
val tableName = "test_native_commands"
assertResult(0) {
@@ -63,23 +59,18 @@ class JavaHiveQLSuite extends FunSuite {
javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)").count()
}
- javaHiveCtx.sql("SHOW TABLES").registerTempTable("show_tables")
-
assert(
javaHiveCtx
- .sql("SELECT result FROM show_tables")
+ .sql("SHOW TABLES")
.collect()
.map(_.getString(0))
.contains(tableName))
- assertResult(Array(Array("key", "int", "None"), Array("value", "string", "None"))) {
- javaHiveCtx.sql(s"DESCRIBE $tableName").registerTempTable("describe_table")
-
-
+ assertResult(Array(Array("key", "int"), Array("value", "string"))) {
javaHiveCtx
- .sql("SELECT result FROM describe_table")
+ .sql(s"describe $tableName")
.collect()
- .map(_.getString(0).split("\t").map(_.trim))
+ .map(row => Array(row.get(0).asInstanceOf[String], row.get(1).asInstanceOf[String]))
.toArray
}
@@ -89,7 +80,7 @@ class JavaHiveQLSuite extends FunSuite {
TestHive.reset()
}
- ignore("Exactly once semantics for DDL and command statements") {
+ test("Exactly once semantics for DDL and command statements") {
val tableName = "test_exactly_once"
val q0 = javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)")