Posted to commits@kyuubi.apache.org by bo...@apache.org on 2023/05/31 12:11:02 UTC

[kyuubi] branch master updated: [KYUUBI #4910] Extract table from ResolvedIdentifier for DropTable in Spark 3.4

This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 5cc51c8ac [KYUUBI #4910] Extract table from ResolvedIdentifier for DropTable in Spark 3.4
5cc51c8ac is described below

commit 5cc51c8ac6c592b13ca55fd9a72411d6caf64690
Author: liangbowen <li...@gf.com.cn>
AuthorDate: Wed May 31 20:10:48 2023 +0800

    [KYUUBI #4910] Extract table from ResolvedIdentifier for DropTable in Spark 3.4
    
    ### _Why are the changes needed?_
    
    - Adapt to the changed logical plan of DropTable in Spark 3.4 by extracting the table object from ResolvedIdentifier, fixing the Spark 3.4 UT "DropTable" (see the sketch below).
    
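    In Spark 3.4 the analyzer resolves the child of DropTable to a ResolvedIdentifier (catalog plus identifier) rather than a ResolvedTable, so the authz plugin has to pull the table out of that node reflectively. A minimal, illustrative sketch of such a reflective extraction follows; it is not the plugin code, and the helper name extractTableName is made up for illustration:
    
    ```scala
    // Hypothetical helper: reflectively pull the table name out of a Spark 3.4
    // ResolvedIdentifier node, matching on the class name so the same code can
    // also run against older Spark versions where the child is a different node.
    def extractTableName(child: AnyRef): Option[String] =
      child.getClass.getName match {
        case "org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier" =>
          // ResolvedIdentifier(catalog: CatalogPlugin, identifier: Identifier)
          val identifier = child.getClass.getMethod("identifier").invoke(child)
          // org.apache.spark.sql.connector.catalog.Identifier#name() is the table name
          val name = identifier.getClass.getMethod("name").invoke(identifier)
          Option(name).map(_.toString)
        case _ => None // e.g. a ResolvedTable on Spark 3.3 and earlier, handled by another extractor
      }
    ```
    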
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #4910 from bowenliang123/authz-resolved-idtable.
    
    Closes #4910
    
    53c76f66d [liangbowen] Extract table from ResolvedIdentifier for DropTable in Spark 3.4
    
    Authored-by: liangbowen <li...@gf.com.cn>
    Signed-off-by: liangbowen <li...@gf.com.cn>
---
 .../src/main/resources/table_command_spec.json             |  9 +++++++++
 .../kyuubi/plugin/spark/authz/serde/tableExtractors.scala  | 14 +++++++++-----
 .../kyuubi/plugin/spark/authz/gen/TableCommands.scala      |  3 +--
 3 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index c67fdbfe7..a9f2ec06e 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -282,6 +282,15 @@
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropTable",
   "tableDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedIdentifierTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  }, {
     "fieldName" : "child",
     "fieldExtractor" : "ResolvedTableTableExtractor",
     "columnDesc" : null,
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
index 53189599a..8743f054d 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
@@ -171,10 +171,14 @@ class ResolvedDbObjectNameTableExtractor extends TableExtractor {
  */
 class ResolvedIdentifierTableExtractor extends TableExtractor {
   override def apply(spark: SparkSession, v1: AnyRef): Option[Table] = {
-    val catalogVal = invoke(v1, "catalog")
-    val catalog = new CatalogPluginCatalogExtractor().apply(catalogVal)
-    val identifier = invoke(v1, "identifier")
-    val maybeTable = new IdentifierTableExtractor().apply(spark, identifier)
-    maybeTable.map(_.copy(catalog = catalog))
+    v1.getClass.getName match {
+      case "org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier" =>
+        val catalogVal = invoke(v1, "catalog")
+        val catalog = new CatalogPluginCatalogExtractor().apply(catalogVal)
+        val identifier = invoke(v1, "identifier")
+        val maybeTable = new IdentifierTableExtractor().apply(spark, identifier)
+        maybeTable.map(_.copy(catalog = catalog))
+      case _ => None
+    }
   }
 }
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 2d2b8ed9a..b08169d39 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -446,8 +446,7 @@ object TableCommands {
 
   val DropTableV2 = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.DropTable"
-    val tableDesc1 = resolvedTableDesc
-    TableCommandSpec(cmd, Seq(tableDesc1), DROPTABLE)
+    TableCommandSpec(cmd, Seq(resolvedIdentifierTableDesc, resolvedTableDesc), DROPTABLE)
   }
 
   val MergeIntoTable = {