author     Liang-Chi Hsieh <viirya@gmail.com>          2015-05-07 16:22:45 -0700
committer  Michael Armbrust <michael@databricks.com>   2015-05-07 16:22:45 -0700
commit     ea3077f19c18b5556a3632b36771aeb153746ff5 (patch)
tree       3f764bec96b2f01e2bc30e0f89de22ca0c5bbc90 /sql
parent     97d1182af63d55abab44521171652c81c56c6af6 (diff)
[SPARK-7277] [SQL] Throw exception if the property mapred.reduce.tasks is set to -1
JIRA: https://issues.apache.org/jira/browse/SPARK-7277

Since automatically determining the number of reducers is not supported, setting `mapred.reduce.tasks` to `-1` (or any other non-positive value) should throw an exception to the user instead of being silently passed through.

Author: Liang-Chi Hsieh <viirya@gmail.com>

Closes #5811 from viirya/no_neg_reduce_tasks and squashes the following commits:

e518f96 [Liang-Chi Hsieh] Consider other wrong setting values.
fd9c817 [Liang-Chi Hsieh] Merge remote-tracking branch 'upstream/master' into no_neg_reduce_tasks
4ede705 [Liang-Chi Hsieh] Throw exception instead of warning message.
68a1c70 [Liang-Chi Hsieh] Show warning message if mapred.reduce.tasks is set to -1.
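
For illustration only (not part of the patch): assuming an existing org.apache.spark.sql.SQLContext named sqlContext, the user-visible behavior after this change is roughly:

  // The deprecated property is still rewritten to spark.sql.shuffle.partitions ...
  sqlContext.sql("SET mapred.reduce.tasks=10")   // ok, becomes spark.sql.shuffle.partitions=10
  // ... but any value below 1 now fails fast instead of being accepted.
  sqlContext.sql("SET mapred.reduce.tasks=-1")   // throws IllegalArgumentException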
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala  10
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala        10
2 files changed, 18 insertions, 2 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 98df5bef34..65687db4e6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -84,8 +84,14 @@ case class SetCommand(
       logWarning(
         s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
           s"automatically converted to ${SQLConf.SHUFFLE_PARTITIONS} instead.")
-      sqlContext.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
-      Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
+      if (value.toInt < 1) {
+        val msg = s"Setting negative ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} for automatically " +
+          "determining the number of reducers is not supported."
+        throw new IllegalArgumentException(msg)
+      } else {
+        sqlContext.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
+        Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
+      }
 
     // Configures a single property.
     case Some((key, Some(value))) =>
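
As a standalone sketch of the guard introduced above (the helper name checkReduceTasks is hypothetical, not part of the patch):

  // Hypothetical helper mirroring the new check: any value below 1 requests
  // automatic reducer count, which Spark SQL does not support, so fail fast.
  def checkReduceTasks(value: String): Unit = {
    if (value.toInt < 1) {
      throw new IllegalArgumentException(
        s"Setting negative mapred.reduce.tasks ($value) is not supported.")
    }
  }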
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 208cec6a32..77be3b8b20 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -871,6 +871,16 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
     conf.clear()
   }
 
+  test("SET commands with illegal or inappropriate argument") {
+    conf.clear()
+    // Setting negative mapred.reduce.tasks for automatically determining
+    // the number of reducers is not supported
+    intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-1"))
+    intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-01"))
+    intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-2"))
+    conf.clear()
+  }
+
test("apply schema") {
val schema1 = StructType(
StructField("f1", IntegerType, false) ::