You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2018/01/16 02:47:48 UTC
spark git commit: [SPARK-23080][SQL] Improve error message for
built-in functions
Repository: spark
Updated Branches:
refs/heads/master 6c81fe227 -> 8ab2d7ea9
[SPARK-23080][SQL] Improve error message for built-in functions
## What changes were proposed in this pull request?
When a user puts the wrong number of parameters in a function, an AnalysisException is thrown. If the function is a UDF, the user is told how many parameters the function expected and how many he/she put. If the function, instead, is a built-in one, no information about the expected and actual number of parameters is provided. Providing it can help in some cases to debug errors (e.g. bad quote escaping may lead to a different number of parameters than expected, etc.)
The PR adds information about the number of parameters passed and the number expected, analogously to what happens for UDFs.
## How was this patch tested?
modified existing UT + manual test
Author: Marco Gaido <ma...@gmail.com>
Closes #20271 from mgaido91/SPARK-23080.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8ab2d7ea
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8ab2d7ea
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8ab2d7ea
Branch: refs/heads/master
Commit: 8ab2d7ea99b2cff8b54b2cb3a1dbf7580845986a
Parents: 6c81fe2
Author: Marco Gaido <ma...@gmail.com>
Authored: Tue Jan 16 11:47:42 2018 +0900
Committer: hyukjinkwon <gu...@gmail.com>
Committed: Tue Jan 16 11:47:42 2018 +0900
----------------------------------------------------------------------
.../spark/sql/catalyst/analysis/FunctionRegistry.scala | 10 +++++++++-
.../resources/sql-tests/results/json-functions.sql.out | 4 ++--
.../src/test/scala/org/apache/spark/sql/UDFSuite.scala | 4 ++--
3 files changed, 13 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/8ab2d7ea/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 5ddb398..747016b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -526,7 +526,15 @@ object FunctionRegistry {
// Otherwise, find a constructor method that matches the number of arguments, and use that.
val params = Seq.fill(expressions.size)(classOf[Expression])
val f = constructors.find(_.getParameterTypes.toSeq == params).getOrElse {
- throw new AnalysisException(s"Invalid number of arguments for function $name")
+ val validParametersCount = constructors.map(_.getParameterCount).distinct.sorted
+ val expectedNumberOfParameters = if (validParametersCount.length == 1) {
+ validParametersCount.head.toString
+ } else {
+ validParametersCount.init.mkString("one of ", ", ", " and ") +
+ validParametersCount.last
+ }
+ throw new AnalysisException(s"Invalid number of arguments for function $name. " +
+ s"Expected: $expectedNumberOfParameters; Found: ${params.length}")
}
Try(f.newInstance(expressions : _*).asInstanceOf[Expression]) match {
case Success(e) => e
http://git-wip-us.apache.org/repos/asf/spark/blob/8ab2d7ea/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index d9dc728..581dddc 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -129,7 +129,7 @@ select to_json()
struct<>
-- !query 12 output
org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function to_json; line 1 pos 7
+Invalid number of arguments for function to_json. Expected: one of 1, 2 and 3; Found: 0; line 1 pos 7
-- !query 13
@@ -225,7 +225,7 @@ select from_json()
struct<>
-- !query 21 output
org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function from_json; line 1 pos 7
+Invalid number of arguments for function from_json. Expected: one of 2, 3 and 4; Found: 0; line 1 pos 7
-- !query 22
http://git-wip-us.apache.org/repos/asf/spark/blob/8ab2d7ea/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index db37be6..af6a10b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -80,7 +80,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
val e = intercept[AnalysisException] {
df.selectExpr("substr('abcd', 2, 3, 4)")
}
- assert(e.getMessage.contains("Invalid number of arguments for function substr"))
+ assert(e.getMessage.contains("Invalid number of arguments for function substr. Expected:"))
}
test("error reporting for incorrect number of arguments - udf") {
@@ -89,7 +89,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
spark.udf.register("foo", (_: String).length)
df.selectExpr("foo(2, 3, 4)")
}
- assert(e.getMessage.contains("Invalid number of arguments for function foo"))
+ assert(e.getMessage.contains("Invalid number of arguments for function foo. Expected:"))
}
test("error reporting for undefined functions") {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org