Posted to commits@kyuubi.apache.org by cs...@apache.org on 2022/06/23 14:36:21 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #2929] Kyuubi integrated Ranger does not support the CTAS syntax

This is an automated email from the ASF dual-hosted git repository.

csy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 7460e745c [KYUUBI #2929] Kyuubi integrated Ranger does not support the CTAS syntax
7460e745c is described below

commit 7460e745c3e5c5efc377a69004572037b5e2fef4
Author: Min Zhao <zh...@163.com>
AuthorDate: Thu Jun 23 22:35:58 2022 +0800

    [KYUUBI #2929] Kyuubi integrated Ranger does not support the CTAS syntax
    
    ### _Why are the changes needed?_
    
    close https://github.com/apache/incubator-kyuubi/issues/2929#issue-1279486176
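
    For illustration, the affected shape is a Hive CTAS (table names below are made up).
    On a Hive-enabled session Spark may plan such a statement as
    `OptimizedCreateHiveTableAsSelectCommand`, which the authz plugin previously matched
    in the branch that reads a `table` field the command does not expose:
    
    ```scala
    // Illustrative only: a CTAS that Spark may plan as OptimizedCreateHiveTableAsSelectCommand.
    spark.sql("CREATE TABLE target_tbl STORED AS parquet AS SELECT id, name FROM source_tbl")
    ```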
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #2931 from zhaomin1423/test_ranger.
    
    Closes #2929
    
    36218c46 [Min Zhao] [KYUUBI #2929] Kyuubi integrated Ranger does not support the CTAS syntax
    d9cca71f [Min Zhao] [Bug] Kyuubi integrated Ranger does not support the CTAS syntax
    
    Authored-by: Min Zhao <zh...@163.com>
    Signed-off-by: Shaoyun Chen <cs...@apache.org>
---
 .../plugin/spark/authz/PrivilegesBuilder.scala     |  6 +++---
 .../spark/authz/PrivilegesBuilderSuite.scala       | 22 ++++++++++++++++++++++
 2 files changed, 25 insertions(+), 3 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index da7e7cfdb..e6090d327 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -324,13 +324,13 @@ object PrivilegesBuilder {
         // fixme: do we need to add columns to check?
         outputObjs += tablePrivileges(table)
 
-      case "CreateDataSourceTableAsSelectCommand" |
-          "OptimizedCreateHiveTableAsSelectCommand" =>
+      case "CreateDataSourceTableAsSelectCommand" =>
         val table = getPlanField[CatalogTable]("table").identifier
         outputObjs += tablePrivileges(table)
         buildQuery(getQuery, inputObjs)
 
-      case "CreateHiveTableAsSelectCommand" =>
+      case "CreateHiveTableAsSelectCommand" |
+          "OptimizedCreateHiveTableAsSelectCommand" =>
         val table = getPlanField[CatalogTable]("tableDesc").identifier
         val cols = getPlanField[Seq[String]]("outputColumnNames")
         outputObjs += tablePrivileges(table, cols)
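
The regrouping above reflects the fields each command class actually exposes: `OptimizedCreateHiveTableAsSelectCommand` carries `tableDesc` and `outputColumnNames`, like `CreateHiveTableAsSelectCommand`, whereas the `CreateDataSourceTableAsSelectCommand` branch reads a `table` field the optimized Hive command does not have. Since `getPlanField` resolves these field names from the plan node at runtime, a command grouped with the wrong branch cannot be resolved. A minimal, simplified sketch of such a by-name lookup (not the plugin's actual `getPlanField` implementation):

```scala
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// Simplified illustration: resolve a case-class field of a logical plan node by its
// accessor name, in the spirit of the matcher's "tableDesc" / "table" lookups above.
// Asking for a field the command class does not define raises a reflection error,
// which is why each command must sit in the branch that uses its real field names.
def planField[T](plan: LogicalPlan, name: String): T =
  plan.getClass.getMethod(name).invoke(plan).asInstanceOf[T]
```

For example, `planField[CatalogTable](plan, "tableDesc").identifier` mirrors the lookup the Hive CTAS branch performs (assuming `CatalogTable` is imported from `org.apache.spark.sql.catalyst.catalog`).
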
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index afd49a657..f99c1beb0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1573,6 +1573,28 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
       assert(tuple._2.size === 0)
     }
   }
+
+  test("OptimizedCreateHiveTableAsSelectCommand") {
+    assume(!isSparkV2)
+    val plan = sql(
+      s"CREATE TABLE OptimizedCreateHiveTableAsSelectCommand STORED AS parquet AS SELECT 1 as a")
+      .queryExecution.analyzed
+    val operationType = OperationType(plan.nodeName)
+
+    assert(operationType === CREATETABLE_AS_SELECT)
+    val tuple = PrivilegesBuilder.build(plan, spark)
+    assert(tuple._1.size === 0)
+
+    assert(tuple._2.size === 1)
+    val po = tuple._2.head
+    assert(po.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assert(po.dbname === "default")
+    assert(po.objectName === "OptimizedCreateHiveTableAsSelectCommand")
+    assert(po.columns === Seq("a"))
+    val accessType = ranger.AccessType(po, operationType, isInput = false)
+    assert(accessType === AccessType.CREATE)
+  }
 }
 
 case class SimpleInsert(userSpecifiedSchema: StructType)(@transient val sparkSession: SparkSession)
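
For completeness, a quick way to see which CTAS command a given statement produces (illustrative only; made-up table name, and whether the optimized node appears depends on Spark's Hive CTAS conversion settings such as `spark.sql.hive.convertMetastoreCtas`):

```scala
// On a Hive-enabled SparkSession, inspect the analyzed plan's node name, just as the
// new test above does via OperationType(plan.nodeName).
val plan = spark.sql(
  "CREATE TABLE ctas_target STORED AS parquet AS SELECT 1 AS a").queryExecution.analyzed
println(plan.nodeName) // e.g. OptimizedCreateHiveTableAsSelectCommand
```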