aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src
diff options
context:
space:
mode:
Diffstat (limited to 'sql/hive/src')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala10
1 file changed, 7 insertions, 3 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
index 8528dfc4ce..61cef2a800 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
@@ -38,10 +38,14 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
intercept[AnalysisException](df.write.bucketBy(2, "k").saveAsTable("tt"))
}
- test("numBuckets not greater than 0 or less than 100000") {
+ test("numBuckets be greater than 0 but less than 100000") {
val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
- intercept[IllegalArgumentException](df.write.bucketBy(0, "i").saveAsTable("tt"))
- intercept[IllegalArgumentException](df.write.bucketBy(100000, "i").saveAsTable("tt"))
+
+ Seq(-1, 0, 100000).foreach(numBuckets => {
+ val e = intercept[AnalysisException](df.write.bucketBy(numBuckets, "i").saveAsTable("tt"))
+ assert(
+ e.getMessage.contains("Number of buckets should be greater than 0 but less than 100000"))
+ })
}
test("specify sorting columns without bucketing columns") {