about summary refs log tree commit diff
path: root/sql/hive
diff options
context:
space:
mode:
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala  19
1 file changed, 15 insertions, 4 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
index ff44c6f294..61a281db85 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
@@ -59,11 +59,22 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
intercept[SparkException](df.write.bucketBy(3, "i").format("text").saveAsTable("tt"))
}
- test("write bucketed data to non-hive-table or existing hive table") {
+ test("write bucketed data using save()") {
val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
- intercept[IllegalArgumentException](df.write.bucketBy(2, "i").parquet("/tmp/path"))
- intercept[IllegalArgumentException](df.write.bucketBy(2, "i").json("/tmp/path"))
- intercept[IllegalArgumentException](df.write.bucketBy(2, "i").insertInto("tt"))
+
+ val e = intercept[IllegalArgumentException] {
+ df.write.bucketBy(2, "i").parquet("/tmp/path")
+ }
+ assert(e.getMessage == "'save' does not support bucketing right now.")
+ }
+
+ test("write bucketed data using insertInto()") {
+ val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
+
+ val e = intercept[IllegalArgumentException] {
+ df.write.bucketBy(2, "i").insertInto("tt")
+ }
+ assert(e.getMessage == "'insertInto' does not support bucketing right now.")
}
private val df = (0 until 50).map(i => (i % 5, i % 13, i.toString)).toDF("i", "j", "k")