Posted to commits@kyuubi.apache.org by ya...@apache.org on 2022/04/17 12:23:56 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #2399] Fix PrivilegesBuilder Building Wrong PrivilegeObjects for Queries Without Project But With OrderBy/PartitionBy

This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 91adc3fd8 [KYUUBI #2399] Fix PrivilegesBuilder Building Wrong PrivilegeObjects for Queries Without Project But With OrderBy/PartitionBy
91adc3fd8 is described below

commit 91adc3fd8a7b0b386126d1b702f1dc30946bde63
Author: Deng An <36...@users.noreply.github.com>
AuthorDate: Sun Apr 17 20:23:18 2022 +0800

    [KYUUBI #2399] Fix PrivilegesBuilder Building Wrong PrivilegeObjects for Queries Without Project But With OrderBy/PartitionBy
    
    ### _Why are the changes needed?_
    
    To close #2399
    `PrivilegesBuilder#buildQuery()` needs additional cases to cover ***NO PROJECT*** situations, where columns are referenced only through `Sort` (ORDER BY) or `Window` (PARTITION BY / ORDER BY) nodes rather than through a `Project`.
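    
    As a minimal sketch of the symptom (assuming an active `SparkSession` named `spark` and a hypothetical table `default.src` with columns `key` and `value`):
    
    ```scala
    import org.apache.kyuubi.plugin.spark.authz.PrivilegesBuilder
    
    // A query that sorts on a column that is not in its select list,
    // so that column is only referenced through the Sort node.
    val plan = spark.sql("SELECT key FROM default.src ORDER BY value")
      .queryExecution.optimizedPlan
    val (inputs, _) = PrivilegesBuilder.build(plan)
    // With this change the input privilege object should report both "key" and "value";
    // previously the ORDER BY column "value" was missed when no Project carried it.
    inputs.foreach(po => println(po.columns))
    ```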
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #2400 from packyan/bugfix_privileges_builder_return_wrong_result.
    
    Closes #2399
    
    a4ad182c [Deng An] add more unit test for PrivilegesBuilder
    59fbde3c [Deng An] add more cases in buildQuery method
    
    Authored-by: Deng An <36...@users.noreply.github.com>
    Signed-off-by: Kent Yao <ya...@apache.org>
---
 .../plugin/spark/authz/PrivilegesBuilder.scala     |  13 +-
 .../spark/authz/PrivilegesBuilderSuite.scala       | 139 ++++++++++++++++++---
 2 files changed, 133 insertions(+), 19 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index b0ea52b49..a17187432 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression}
-import org.apache.spark.sql.catalyst.plans.logical.{Command, Filter, Join, LogicalPlan, Project}
+import org.apache.spark.sql.catalyst.plans.logical.{Command, Filter, Join, LogicalPlan, Project, Sort, Window}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.types.StructField
 
@@ -104,6 +104,17 @@ object PrivilegesBuilder {
         val cols = conditionList ++ collectLeaves(f.condition)
         buildQuery(f.child, privilegeObjects, projectionList, cols)
 
+      case w: Window =>
+        val orderCols = w.orderSpec.flatMap(orderSpec => collectLeaves(orderSpec))
+        val partitionCols = w.partitionSpec.flatMap(partitionSpec => collectLeaves(partitionSpec))
+        val cols = conditionList ++ orderCols ++ partitionCols
+        buildQuery(w.child, privilegeObjects, projectionList, cols)
+
+      case s: Sort =>
+        val sortCols = s.order.flatMap(sortOrder => collectLeaves(sortOrder))
+        val cols = conditionList ++ sortCols
+        buildQuery(s.child, privilegeObjects, projectionList, cols)
+
       case hiveTableRelation if hasResolvedHiveTable(hiveTableRelation) =>
         mergeProjection(getHiveTable(hiveTableRelation), hiveTableRelation)
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index fda9e2814..ee9ce44fc 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -165,24 +165,6 @@ abstract class PrivilegesBuilderSuite extends KyuubiFunSuite with SparkSessionPr
     }
   }
 
-  test("AlterTableAddColumnsCommand") {
-    val plan = sql(s"ALTER TABLE $reusedTable" +
-      s" ADD COLUMNS (a int)").queryExecution.analyzed
-    val operationType = OperationType(plan.nodeName)
-    assert(operationType === ALTERTABLE_ADDCOLS)
-    val tuple = PrivilegesBuilder.build(plan)
-    assert(tuple._1.isEmpty)
-    assert(tuple._2.size === 1)
-    val po = tuple._2.head
-    assert(po.actionType === PrivilegeObjectActionType.OTHER)
-    assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
-    assert(po.dbname equalsIgnoreCase reusedDb)
-    assert(po.objectName === getClass.getSimpleName)
-    assert(po.columns.head === "a")
-    val accessType = ranger.AccessType(po, operationType, isInput = false)
-    assert(accessType === AccessType.ALTER)
-  }
-
   test("AlterTableAddPartitionCommand") {
     val plan = sql(s"ALTER TABLE $reusedPartTable ADD IF NOT EXISTS PARTITION (pid=1)")
       .queryExecution.analyzed
@@ -891,6 +873,58 @@ abstract class PrivilegesBuilderSuite extends KyuubiFunSuite with SparkSessionPr
       Seq("value", "pid"))
   }
 
+  test("Query: Subquery With Window") {
+    val plan = sql(
+      s"""
+         |SELECT value, rank FROM(
+         |SELECT *,
+         |RANK() OVER (PARTITION BY key ORDER BY pid) AS rank
+         |FROM $reusedPartTable)
+         |""".stripMargin).queryExecution.optimizedPlan
+    val operationType = OperationType(plan.nodeName)
+    assert(operationType === QUERY)
+    val tuple = PrivilegesBuilder.build(plan)
+    assert(tuple._1.size === 1)
+    tuple._1.foreach { po =>
+      assert(po.actionType === PrivilegeObjectActionType.OTHER)
+      assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+      assert(po.dbname equalsIgnoreCase reusedDb)
+      assert(po.objectName startsWith reusedTableShort.toLowerCase)
+      assert(
+        po.columns === Seq("value", "pid", "key"),
+        s"'key', 'value' and 'pid' of $reusedPartTable should all be authenticated")
+      val accessType = ranger.AccessType(po, operationType, isInput = true)
+      assert(accessType === AccessType.SELECT)
+    }
+    assert(tuple._2.size === 0)
+  }
+
+  test("Query: Subquery With Order") {
+    val plan = sql(
+      s"""
+         |SELECT value FROM(
+         |SELECT *
+         |FROM $reusedPartTable
+         |ORDER BY key, pid)
+         |""".stripMargin).queryExecution.optimizedPlan
+    val operationType = OperationType(plan.nodeName)
+    assert(operationType === QUERY)
+    val tuple = PrivilegesBuilder.build(plan)
+    assert(tuple._1.size === 1)
+    tuple._1.foreach { po =>
+      assert(po.actionType === PrivilegeObjectActionType.OTHER)
+      assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+      assert(po.dbname equalsIgnoreCase reusedDb)
+      assert(po.objectName startsWith reusedTableShort.toLowerCase)
+      assert(
+        po.columns === Seq("value", "key", "pid"),
+        s"'key', 'value' and 'pid' of $reusedPartTable should all be authenticated")
+      val accessType = ranger.AccessType(po, operationType, isInput = true)
+      assert(accessType === AccessType.SELECT)
+    }
+    assert(tuple._2.size === 0)
+  }
+
   test("Query: INNER JOIN") {
     val sqlStr =
       s"""
@@ -977,6 +1011,56 @@ abstract class PrivilegesBuilderSuite extends KyuubiFunSuite with SparkSessionPr
     assert(tuple._2.size === 0)
   }
 
+  test("Query: Window Without Project") {
+    val plan = sql(
+      s"""
+         |SELECT key,
+         |RANK() OVER (PARTITION BY key ORDER BY value) AS rank
+         |FROM $reusedTable
+         |""".stripMargin).queryExecution.optimizedPlan
+    val operationType = OperationType(plan.nodeName)
+    assert(operationType === QUERY)
+    val tuple = PrivilegesBuilder.build(plan)
+    assert(tuple._1.size === 1)
+    tuple._1.foreach { po =>
+      assert(po.actionType === PrivilegeObjectActionType.OTHER)
+      assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+      assert(po.dbname equalsIgnoreCase reusedDb)
+      assert(po.objectName startsWith reusedTableShort.toLowerCase)
+      assert(
+        po.columns === Seq("key", "value"),
+        s"'key' and 'value' of $reusedTable should both be authenticated")
+      val accessType = ranger.AccessType(po, operationType, isInput = true)
+      assert(accessType === AccessType.SELECT)
+    }
+    assert(tuple._2.size === 0)
+  }
+
+  test("Query: Order By Without Project") {
+    val plan = sql(
+      s"""
+         |SELECT key
+         |FROM $reusedPartTable
+         |ORDER BY key, value, pid
+         |""".stripMargin).queryExecution.optimizedPlan
+    val operationType = OperationType(plan.nodeName)
+    assert(operationType === QUERY)
+    val tuple = PrivilegesBuilder.build(plan)
+    assert(tuple._1.size === 1)
+    tuple._1.foreach { po =>
+      assert(po.actionType === PrivilegeObjectActionType.OTHER)
+      assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+      assert(po.dbname equalsIgnoreCase reusedDb)
+      assert(po.objectName startsWith reusedTableShort.toLowerCase)
+      assert(
+        po.columns === Seq("key", "value", "pid"),
+        s"'key', 'value' and 'pid' of $reusedPartTable should all be authenticated")
+      val accessType = ranger.AccessType(po, operationType, isInput = true)
+      assert(accessType === AccessType.SELECT)
+    }
+    assert(tuple._2.size === 0)
+  }
+
   test("Query: Union") {
     val plan = sql(
       s"""
@@ -1155,4 +1239,23 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
       assert(tuple._2.size === 0)
     }
   }
+  // ALTER TABLE ADD COLUMNS should be run at the end
+  // to avoid affecting cases that rely on $reusedTable's table structure.
+  test("AlterTableAddColumnsCommand") {
+    val plan = sql(s"ALTER TABLE $reusedTable" +
+      s" ADD COLUMNS (a int)").queryExecution.analyzed
+    val operationType = OperationType(plan.nodeName)
+    assert(operationType === ALTERTABLE_ADDCOLS)
+    val tuple = PrivilegesBuilder.build(plan)
+    assert(tuple._1.isEmpty)
+    assert(tuple._2.size === 1)
+    val po = tuple._2.head
+    assert(po.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assert(po.dbname equalsIgnoreCase reusedDb)
+    assert(po.objectName === getClass.getSimpleName)
+    assert(po.columns.head === "a")
+    val accessType = ranger.AccessType(po, operationType, isInput = false)
+    assert(accessType === AccessType.ALTER)
+  }
 }