You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2015/05/08 01:22:49 UTC
spark git commit: [SPARK-7277] [SQL] Throw exception if the property
mapred.reduce.tasks is set to -1
Repository: spark
Updated Branches:
refs/heads/master 97d1182af -> ea3077f19
[SPARK-7277] [SQL] Throw exception if the property mapred.reduce.tasks is set to -1
JIRA: https://issues.apache.org/jira/browse/SPARK-7277
As automatically determining the number of reducers is not supported (`mapred.reduce.tasks` is set to `-1`), we should throw an exception to inform users.
Author: Liang-Chi Hsieh <vi...@gmail.com>
Closes #5811 from viirya/no_neg_reduce_tasks and squashes the following commits:
e518f96 [Liang-Chi Hsieh] Consider other wrong setting values.
fd9c817 [Liang-Chi Hsieh] Merge remote-tracking branch 'upstream/master' into no_neg_reduce_tasks
4ede705 [Liang-Chi Hsieh] Throw exception instead of warning message.
68a1c70 [Liang-Chi Hsieh] Show warning message if mapred.reduce.tasks is set to -1.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ea3077f1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ea3077f1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ea3077f1
Branch: refs/heads/master
Commit: ea3077f19c18b5556a3632b36771aeb153746ff5
Parents: 97d1182
Author: Liang-Chi Hsieh <vi...@gmail.com>
Authored: Thu May 7 16:22:45 2015 -0700
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Thu May 7 16:22:45 2015 -0700
----------------------------------------------------------------------
.../scala/org/apache/spark/sql/execution/commands.scala | 10 ++++++++--
.../test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 10 ++++++++++
2 files changed, 18 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/ea3077f1/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 98df5be..65687db 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -84,8 +84,14 @@ case class SetCommand(
logWarning(
s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
s"automatically converted to ${SQLConf.SHUFFLE_PARTITIONS} instead.")
- sqlContext.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
- Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
+ if (value.toInt < 1) {
+ val msg = s"Setting negative ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} for automatically " +
+ "determining the number of reducers is not supported."
+ throw new IllegalArgumentException(msg)
+ } else {
+ sqlContext.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
+ Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
+ }
// Configures a single property.
case Some((key, Some(value))) =>
http://git-wip-us.apache.org/repos/asf/spark/blob/ea3077f1/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 208cec6..77be3b8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -871,6 +871,16 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
conf.clear()
}
+ test("SET commands with illegal or inappropriate argument") {
+ conf.clear()
+ // Set negative mapred.reduce.tasks for automatically determing
+ // the number of reducers is not supported
+ intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-1"))
+ intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-01"))
+ intercept[IllegalArgumentException](sql(s"SET mapred.reduce.tasks=-2"))
+ conf.clear()
+ }
+
test("apply schema") {
val schema1 = StructType(
StructField("f1", IntegerType, false) ::
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org