You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2016/11/23 06:25:31 UTC
spark git commit: [SPARK-18179][SQL] Throws analysis exception with a
proper message for unsupported argument types in reflect/java_method function
Repository: spark
Updated Branches:
refs/heads/master 982b82e32 -> 2559fb4b4
[SPARK-18179][SQL] Throws analysis exception with a proper message for unsupported argument types in reflect/java_method function
## What changes were proposed in this pull request?
This PR proposes throwing an `AnalysisException` with a proper message rather than `NoSuchElementException` with the message `key not found: TimestampType` when unsupported types are given to `reflect` and `java_method` functions.
```scala
spark.range(1).selectExpr("reflect('java.lang.String', 'valueOf', cast('1990-01-01' as timestamp))")
```
produces
**Before**
```
java.util.NoSuchElementException: key not found: TimestampType
at scala.collection.MapLike$class.default(MapLike.scala:228)
at scala.collection.AbstractMap.default(Map.scala:59)
at scala.collection.MapLike$class.apply(MapLike.scala:141)
at scala.collection.AbstractMap.apply(Map.scala:59)
at org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection$$anonfun$findMethod$1$$anonfun$apply$1.apply(CallMethodViaReflection.scala:159)
...
```
**After**
```
cannot resolve 'reflect('java.lang.String', 'valueOf', CAST('1990-01-01' AS TIMESTAMP))' due to data type mismatch: arguments from the third require boolean, byte, short, integer, long, float, double or string expressions; line 1 pos 0;
'Project [unresolvedalias(reflect(java.lang.String, valueOf, cast(1990-01-01 as timestamp)), Some(<function1>))]
+- Range (0, 1, step=1, splits=Some(2))
...
```
The added message is:
```
arguments from the third require boolean, byte, short, integer, long, float, double or string expressions
```
## How was this patch tested?
Tests added in `CallMethodViaReflectionSuite`.
Author: hyukjinkwon <gu...@gmail.com>
Closes #15694 from HyukjinKwon/SPARK-18179.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2559fb4b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2559fb4b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2559fb4b
Branch: refs/heads/master
Commit: 2559fb4b40c9f42f7b3ed2b77de14461f68b6fa5
Parents: 982b82e
Author: hyukjinkwon <gu...@gmail.com>
Authored: Tue Nov 22 22:25:27 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Tue Nov 22 22:25:27 2016 -0800
----------------------------------------------------------------------
.../sql/catalyst/expressions/CallMethodViaReflection.scala | 4 ++++
.../catalyst/expressions/CallMethodViaReflectionSuite.scala | 9 +++++++++
2 files changed, 13 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/2559fb4b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index 40f1b14..4859e0c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -65,6 +65,10 @@ case class CallMethodViaReflection(children: Seq[Expression])
TypeCheckFailure("first two arguments should be string literals")
} else if (!classExists) {
TypeCheckFailure(s"class $className not found")
+ } else if (children.slice(2, children.length)
+ .exists(e => !CallMethodViaReflection.typeMapping.contains(e.dataType))) {
+ TypeCheckFailure("arguments from the third require boolean, byte, short, " +
+ "integer, long, float, double or string expressions")
} else if (method == null) {
TypeCheckFailure(s"cannot find a static method that matches the argument types in $className")
} else {
http://git-wip-us.apache.org/repos/asf/spark/blob/2559fb4b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
index 43367c7..88d4d46 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.catalyst.expressions
+import java.sql.Timestamp
+
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure
import org.apache.spark.sql.types.{IntegerType, StringType}
@@ -85,6 +87,13 @@ class CallMethodViaReflectionSuite extends SparkFunSuite with ExpressionEvalHelp
assert(createExpr(staticClassName, "method1").checkInputDataTypes().isSuccess)
}
+ test("unsupported type checking") {
+ val ret = createExpr(staticClassName, "method1", new Timestamp(1)).checkInputDataTypes()
+ assert(ret.isFailure)
+ val errorMsg = ret.asInstanceOf[TypeCheckFailure].message
+ assert(errorMsg.contains("arguments from the third require boolean, byte, short"))
+ }
+
test("invoking methods using acceptable types") {
checkEvaluation(createExpr(staticClassName, "method1"), "m1")
checkEvaluation(createExpr(staticClassName, "method2", 2), "m2")
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org