Posted to commits@spark.apache.org by li...@apache.org on 2017/10/25 05:59:49 UTC

spark git commit: [SPARK-21101][SQL] Catch IllegalStateException when CREATE TEMPORARY FUNCTION

Repository: spark
Updated Branches:
  refs/heads/master bc1e76632 -> 524abb996


[SPARK-21101][SQL] Catch IllegalStateException when CREATE TEMPORARY FUNCTION

## What changes were proposed in this pull request?

When creating a UDTF, you must `override` [`public StructObjectInspector initialize(ObjectInspector[] argOIs)`](https://github.com/apache/hive/blob/release-2.0.0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java#L70).

If you instead `override` [`public StructObjectInspector initialize(StructObjectInspector argOIs)`](https://github.com/apache/hive/blob/release-2.0.0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java#L49), an `IllegalStateException` will be thrown; see [HIVE-12377](https://issues.apache.org/jira/browse/HIVE-12377).

This PR catches the `IllegalStateException` and points the user to `override` `public StructObjectInspector initialize(ObjectInspector[] argOIs)`.
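For illustration, here is a minimal UDTF sketch in Scala showing the overload that must be overridden. The class name `ExplodeArgsUDTF` and its single string output column are made up for this example; it is not one of the `UDTFStack`/`UDTFStack2` classes bundled in the test jar.

```scala
import java.util.{ArrayList => JArrayList}

import org.apache.hadoop.hive.ql.exec.UDFArgumentException
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory, StructObjectInspector}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory

class ExplodeArgsUDTF extends GenericUDTF {

  // Correct: override the ObjectInspector[] overload. Spark calls this one, and Hive's
  // default implementation throws IllegalStateException when it is not overridden.
  // Overriding only initialize(StructObjectInspector) is what triggers the error this PR catches.
  override def initialize(argOIs: Array[ObjectInspector]): StructObjectInspector = {
    if (argOIs.isEmpty) {
      throw new UDFArgumentException("explode_args expects at least one argument")
    }
    val fieldNames = new JArrayList[String]()
    fieldNames.add("col")
    val fieldOIs = new JArrayList[ObjectInspector]()
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector)
    ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs)
  }

  // Emit one single-column row per argument.
  override def process(args: Array[AnyRef]): Unit = {
    args.foreach(arg => forward(Array[AnyRef](if (arg == null) null else arg.toString)))
  }

  override def close(): Unit = {}
}
```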

## How was this patch tested?

unit tests

Source code and binary jar: [SPARK-21101.zip](https://github.com/apache/spark/files/1123763/SPARK-21101.zip)
The two source files are copied from https://github.com/apache/hive/blob/release-2.0.0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java.

Author: Yuming Wang <wg...@gmail.com>

Closes #18527 from wangyum/SPARK-21101.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/524abb99
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/524abb99
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/524abb99

Branch: refs/heads/master
Commit: 524abb996abc9970d699623c13469ea3b6d2d3fc
Parents: bc1e766
Author: Yuming Wang <wg...@gmail.com>
Authored: Tue Oct 24 22:59:46 2017 -0700
Committer: gatorsmile <ga...@gmail.com>
Committed: Tue Oct 24 22:59:46 2017 -0700

----------------------------------------------------------------------
 .../spark/sql/hive/HiveSessionCatalog.scala     |  11 ++++++--
 sql/hive/src/test/resources/SPARK-21101-1.0.jar | Bin 0 -> 7439 bytes
 .../sql/hive/execution/SQLQuerySuite.scala      |  26 +++++++++++++++++++
 3 files changed, 35 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/524abb99/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index b256ffc..1f11adb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -94,8 +94,15 @@ private[sql] class HiveSessionCatalog(
         }
       } catch {
         case NonFatal(e) =>
-          val analysisException =
-            new AnalysisException(s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}': $e")
+          val noHandlerMsg = s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}': $e"
+          val errorMsg =
+            if (classOf[GenericUDTF].isAssignableFrom(clazz)) {
+              s"$noHandlerMsg\nPlease make sure your function overrides " +
+                "`public StructObjectInspector initialize(ObjectInspector[] args)`."
+            } else {
+              noHandlerMsg
+            }
+          val analysisException = new AnalysisException(errorMsg)
           analysisException.setStackTrace(e.getStackTrace)
           throw analysisException
       }

http://git-wip-us.apache.org/repos/asf/spark/blob/524abb99/sql/hive/src/test/resources/SPARK-21101-1.0.jar
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/resources/SPARK-21101-1.0.jar b/sql/hive/src/test/resources/SPARK-21101-1.0.jar
new file mode 100644
index 0000000..768b233
Binary files /dev/null and b/sql/hive/src/test/resources/SPARK-21101-1.0.jar differ

http://git-wip-us.apache.org/repos/asf/spark/blob/524abb99/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 39e918c..c11e37a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -2010,6 +2010,32 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     }
   }
 
+  test("SPARK-21101 UDTF should override initialize(ObjectInspector[] args)") {
+    withUserDefinedFunction("udtf_stack1" -> true, "udtf_stack2" -> true) {
+      sql(
+        s"""
+           |CREATE TEMPORARY FUNCTION udtf_stack1
+           |AS 'org.apache.spark.sql.hive.execution.UDTFStack'
+           |USING JAR '${hiveContext.getHiveFile("SPARK-21101-1.0.jar").toURI}'
+        """.stripMargin)
+      val cnt =
+        sql("SELECT udtf_stack1(2, 'A', 10, date '2015-01-01', 'B', 20, date '2016-01-01')").count()
+      assert(cnt === 2)
+
+      sql(
+        s"""
+           |CREATE TEMPORARY FUNCTION udtf_stack2
+           |AS 'org.apache.spark.sql.hive.execution.UDTFStack2'
+           |USING JAR '${hiveContext.getHiveFile("SPARK-21101-1.0.jar").toURI}'
+        """.stripMargin)
+      val e = intercept[org.apache.spark.sql.AnalysisException] {
+        sql("SELECT udtf_stack2(2, 'A', 10, date '2015-01-01', 'B', 20, date '2016-01-01')")
+      }
+      assert(
+        e.getMessage.contains("public StructObjectInspector initialize(ObjectInspector[] args)"))
+    }
+  }
+
   test("SPARK-21721: Clear FileSystem deleterOnExit cache if path is successfully removed") {
     val table = "test21721"
     withTable(table) {

