You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kyuubi.apache.org by yi...@apache.org on 2023/03/16 06:26:31 UTC
[kyuubi] branch master updated: [KYUUBI #4532] [AUTHZ] Displays the columns involved in extracting the aggregation operator
This is an automated email from the ASF dual-hosted git repository.
yikaifei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 709899f86 [KYUUBI #4532] [AUTHZ] Displays the columns involved in extracting the aggregation operator
709899f86 is described below
commit 709899f8666a4cf23a3b504f288a031b0fddfd17
Author: Yikf <yi...@apache.org>
AuthorDate: Thu Mar 16 14:26:20 2023 +0800
[KYUUBI #4532] [AUTHZ] Displays the columns involved in extracting the aggregation operator
### _Why are the changes needed?_
This PR aims to display the columns involved in extracting the aggregation operator.
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before make a pull request
Closes #4532 from Yikf/agg-authZ.
Closes #4532
6a3468f11 [Yikf] Displays the columns involved in extracting the aggregation operator
Authored-by: Yikf <yi...@apache.org>
Signed-off-by: Yikf <yi...@apache.org>
---
.../plugin/spark/authz/PrivilegesBuilder.scala | 6 ++++
.../spark/authz/PrivilegesBuilderSuite.scala | 42 ++++++++++++++++++++++
2 files changed, 48 insertions(+)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 51f5694e1..b8220ea27 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -95,6 +95,12 @@ object PrivilegesBuilder {
val cols = conditionList ++ sortCols
buildQuery(s.child, privilegeObjects, projectionList, cols, spark)
+ case a: Aggregate =>
+ val aggCols =
+ (a.aggregateExpressions ++ a.groupingExpressions).flatMap(e => collectLeaves(e))
+ val cols = conditionList ++ aggCols
+ buildQuery(a.child, privilegeObjects, projectionList, cols, spark)
+
case scan if isKnownScan(scan) && scan.resolved =>
getScanSpec(scan).tables(scan, spark).foreach(mergeProjection(_, scan))
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index b014aaaca..439290917 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1645,6 +1645,48 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
val accessType = ranger.AccessType(po, operationType, isInput = false)
assert(accessType === AccessType.CREATE)
}
+
+ test("KYUUBI #4532: Displays the columns involved in extracting the aggregation operator") {
+ // case 1: There is no project operator; all columns are involved.
+ val plan1 = sql(s"SELECT COUNT(key), MAX(value) FROM $reusedPartTable GROUP BY pid")
+ .queryExecution.optimizedPlan
+ val (in1, out1, _) = PrivilegesBuilder.build(plan1, spark)
+ assert(in1.size === 1)
+ assert(out1.isEmpty)
+ val pi1 = in1.head
+ assert(pi1.columns.size === 3)
+ assert(pi1.columns === Seq("key", "value", "pid"))
+
+ // case2: Some columns are involved, and the group column is not selected.
+ val plan2 = sql(s"SELECT COUNT(key) FROM $reusedPartTable GROUP BY pid")
+ .queryExecution.optimizedPlan
+ val (in2, out2, _) = PrivilegesBuilder.build(plan2, spark)
+ assert(in2.size === 1)
+ assert(out2.isEmpty)
+ val pi2 = in2.head
+ assert(pi2.columns.size === 2)
+ assert(pi2.columns === Seq("key", "pid"))
+
+ // case3: Some columns are involved, and the group column is selected.
+ val plan3 = sql(s"SELECT COUNT(key), pid FROM $reusedPartTable GROUP BY pid")
+ .queryExecution.optimizedPlan
+ val (in3, out3, _) = PrivilegesBuilder.build(plan3, spark)
+ assert(in3.size === 1)
+ assert(out3.isEmpty)
+ val pi3 = in3.head
+ assert(pi3.columns.size === 2)
+ assert(pi3.columns === Seq("key", "pid"))
+
+ // case4: HAVING & GROUP clause
+ val plan4 = sql(s"SELECT COUNT(key) FROM $reusedPartTable GROUP BY pid HAVING MAX(key) > 1000")
+ .queryExecution.optimizedPlan
+ val (in4, out4, _) = PrivilegesBuilder.build(plan4, spark)
+ assert(in4.size === 1)
+ assert(out4.isEmpty)
+ val pi4 = in4.head
+ assert(pi4.columns.size === 2)
+ assert(pi4.columns === Seq("key", "pid"))
+ }
}
case class SimpleInsert(userSpecifiedSchema: StructType)(@transient val sparkSession: SparkSession)