From f041e55eefe1d8a995fed321c66bccbd8b8e5255 Mon Sep 17 00:00:00 2001
From: Tejas Patil
Date: Wed, 15 Feb 2017 22:45:58 -0800
Subject: [SPARK-19618][SQL] Inconsistency wrt max. buckets allowed from
 Dataframe API vs SQL

## What changes were proposed in this pull request?

Jira: https://issues.apache.org/jira/browse/SPARK-19618

Moved the check for validating the number of buckets from `DataFrameWriter` to `BucketSpec` creation.

## How was this patch tested?

- Added more unit tests

Author: Tejas Patil

Closes #16948 from tejasapatil/SPARK-19618_max_buckets.
---
 .../org/apache/spark/sql/sources/BucketedWriteSuite.scala | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

(limited to 'sql/hive/src')

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
index 8528dfc4ce..61cef2a800 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
@@ -38,10 +38,14 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
     intercept[AnalysisException](df.write.bucketBy(2, "k").saveAsTable("tt"))
   }
 
-  test("numBuckets not greater than 0 or less than 100000") {
+  test("numBuckets be greater than 0 but less than 100000") {
     val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
-    intercept[IllegalArgumentException](df.write.bucketBy(0, "i").saveAsTable("tt"))
-    intercept[IllegalArgumentException](df.write.bucketBy(100000, "i").saveAsTable("tt"))
+
+    Seq(-1, 0, 100000).foreach(numBuckets => {
+      val e = intercept[AnalysisException](df.write.bucketBy(numBuckets, "i").saveAsTable("tt"))
+      assert(
+        e.getMessage.contains("Number of buckets should be greater than 0 but less than 100000"))
+    })
   }
 
   test("specify sorting columns without bucketing columns") {
--
cgit v1.2.3
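
As background for the test change above: the fix moves the bucket-count validation into `BucketSpec` creation, so the Dataframe API and SQL paths fail through the same check. Below is a minimal sketch of what that constructor-time guard could look like. The field names mirror Spark's `BucketSpec` and the error message matches the string asserted in the test, but treat it as an illustration rather than the exact patched source — the real class also validates column names, and `AnalysisException`'s constructor is only accessible inside Spark's `sql` package.

```scala
import org.apache.spark.sql.AnalysisException

// Sketch of the relocated guard: numBuckets is validated when the BucketSpec
// is constructed, so DataFrameWriter.bucketBy and SQL's
// CLUSTERED BY ... INTO n BUCKETS both hit the same code path.
// Illustrative only; assumes it compiles inside Spark's sql package, where
// the AnalysisException constructor is visible.
case class BucketSpec(
    numBuckets: Int,
    bucketColumnNames: Seq[String],
    sortColumnNames: Seq[String]) {
  if (numBuckets <= 0 || numBuckets >= 100000) {
    throw new AnalysisException(
      "Number of buckets should be greater than 0 but less than 100000")
  }
}
```

Because the check now fires during `BucketSpec` creation, the updated test intercepts `AnalysisException` for -1, 0, and 100000 alike, where the old `DataFrameWriter`-local guard threw `IllegalArgumentException`.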