author    Dilip Biswal <dbiswal@us.ibm.com>    2016-05-05 14:44:45 -0700
committer Andrew Or <andrew@databricks.com>    2016-05-05 14:44:45 -0700
commit    02c07e8999dca545849cb3aa758a624dc51cd1e9 (patch)
tree      300d3c943a0f79ef069c6208d2cdf08eb44762cf /sql
parent    08db491265a3b50e31993ac6aa07c3f0dd08cdbb (diff)
[SPARK-14893][SQL] Re-enable HiveSparkSubmitSuite SPARK-8489 test after HiveContext is removed
## What changes were proposed in this pull request?

Enable the test that was disabled when HiveContext was removed.

## How was this patch tested?

Made sure the enabled test passes with the new jar.

Author: Dilip Biswal <dbiswal@us.ibm.com>

Closes #12924 from dilipbiswal/spark-14893.
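The old test jars were built against the HiveContext-era `SparkSession.withHiveSupport(sc)` entry point, so they had to be rebuilt around the SparkSession builder API. A minimal, self-contained sketch of the pattern the rebuilt Main.scala now follows (it mirrors the diff below; the case-class field names are illustrative, since only the values "1", "2", "3" appear in the diff):

```scala
import org.apache.spark.sql.SparkSession

// Illustrative stand-in for the MyCoolClass defined in the test jar; used to
// exercise reflection-based schema inference.
case class MyCoolClass(prop1: String, prop2: String, prop3: String)

object RegressionSketch {
  def main(args: Array[String]): Unit = {
    // Build a Hive-enabled SparkSession instead of the removed
    // SparkContext + SparkSession.withHiveSupport combination.
    val spark = SparkSession.builder
      .master("local")
      .appName("testing")
      .enableHiveSupport()
      .getOrCreate()

    // Creating a DataFrame from a Seq of case classes goes through Scala reflection;
    // before the SPARK-8470 fix this threw scala.reflect.internal.MissingRequirementError.
    val df = spark.createDataFrame(Seq(MyCoolClass("1", "2", "3")))
    df.collect()

    spark.stop()
  }
}
```

Packaged into test-2.10.jar and test-2.11.jar, this is what the re-enabled suite test submits.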
Diffstat (limited to 'sql')
-rw-r--r--  sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala            | 12
-rw-r--r--  sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar         | bin 6873 -> 6865 bytes
-rw-r--r--  sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar         | bin 7039 -> 7030 bytes
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala | 3
4 files changed, 8 insertions, 7 deletions
diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala b/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
index 10a017df83..4fbbbacb76 100644
--- a/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
+++ b/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*/
-import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
/**
@@ -33,15 +32,18 @@ object Main {
def main(args: Array[String]) {
// scalastyle:off println
println("Running regression test for SPARK-8489.")
- val sc = new SparkContext("local", "testing")
- val sparkSession = SparkSession.withHiveSupport(sc)
+ val spark = SparkSession.builder
+ .master("local")
+ .appName("testing")
+ .enableHiveSupport()
+ .getOrCreate()
// This line should not throw scala.reflect.internal.MissingRequirementError.
// See SPARK-8470 for more detail.
- val df = sparkSession.createDataFrame(Seq(MyCoolClass("1", "2", "3")))
+ val df = spark.createDataFrame(Seq(MyCoolClass("1", "2", "3")))
df.collect()
println("Regression test for SPARK-8489 success!")
// scalastyle:on println
- sc.stop()
+ spark.stop()
}
}
diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar
index 26d410f330..3f28d37b93 100644
--- a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar
+++ b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar
Binary files differ
diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar
index f34784752f..5e093697e2 100644
--- a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar
+++ b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar
Binary files differ
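The suite change below only re-enables the test; the test itself drives the rebuilt jar through spark-submit via the suite's runSparkSubmit helper, which appears in the diff context. A hedged sketch of that kind of invocation (the Scala-version selection and argument list here are illustrative, not the suite's exact code):

```scala
import scala.util.Properties

// Pick the pre-built regression jar matching the running Scala version (2.10 or 2.11).
val scalaBinaryVersion = Properties.versionNumberString.take(4)  // e.g. "2.10" or "2.11"
val testJar =
  s"sql/hive/src/test/resources/regression-test-SPARK-8489/test-$scalaBinaryVersion.jar"

// spark-submit arguments: run the jar's Main class on a local master.
val args = Seq(
  "--class", "Main",
  "--master", "local",
  testJar)

// runSparkSubmit is the suite's own helper (visible in the diff context below),
// which launches bin/spark-submit with these arguments.
runSparkSubmit(args)
```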
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index a320011799..a717a9978e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -142,8 +142,7 @@ class HiveSparkSubmitSuite
runSparkSubmit(args)
}
- // TODO: re-enable this after rebuilding the jar (HiveContext was removed)
- ignore("SPARK-8489: MissingRequirementError during reflection") {
+ test("SPARK-8489: MissingRequirementError during reflection") {
// This test uses a pre-built jar to test SPARK-8489. In a nutshell, this test creates
// a HiveContext and uses it to create a data frame from an RDD using reflection.
// Before the fix in SPARK-8470, this results in a MissingRequirementError because