You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kyuubi.apache.org by bo...@apache.org on 2023/03/10 15:04:16 UTC

[kyuubi] branch master updated: [KYUUBI #4493] [AUTHZ][TEST] Enable Tests for Spark 3.1 with iceberg tables

This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 1bef628ab [KYUUBI #4493] [AUTHZ][TEST] Enable Tests for Spark 3.1 with iceberg tables
1bef628ab is described below

commit 1bef628ab365e035a6baaf5c3d66f6efdece7e06
Author: Kent Yao <ya...@apache.org>
AuthorDate: Fri Mar 10 23:04:04 2023 +0800

    [KYUUBI #4493] [AUTHZ][TEST] Enable Tests for Spark 3.1 with iceberg tables
    
    ### _Why are the changes needed?_
    
    Enable Tests for Spark 3.1 with iceberg tables
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #4493 from yaooqinn/it.
    
    Closes #4493
    
    b40e9de05 [Kent Yao] [AUTHZ][TEST] Enable Tests for Spark 3.1 with iceberg tables
    29a94addd [Kent Yao] [AUTHZ][TEST] Enable Tests for Spark 3.1 with iceberg tables
    
    Authored-by: Kent Yao <ya...@apache.org>
    Signed-off-by: liangbowen <li...@gf.com.cn>
---
 .../IcebergCatalogPrivilegesBuilderSuite.scala      |  6 +++---
 .../IcebergCatalogRangerSparkExtensionSuite.scala   | 21 +++++++++------------
 2 files changed, 12 insertions(+), 15 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/IcebergCatalogPrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/IcebergCatalogPrivilegesBuilderSuite.scala
index d89d0696f..813970389 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/IcebergCatalogPrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/IcebergCatalogPrivilegesBuilderSuite.scala
@@ -26,7 +26,7 @@ import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
 class IcebergCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite {
   override protected val catalogImpl: String = "hive"
   override protected val sqlExtensions: String =
-    if (isSparkV32OrGreater) {
+    if (isSparkV31OrGreater) {
       "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
     } else ""
   override protected def format = "iceberg"
@@ -38,7 +38,7 @@ class IcebergCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite {
   override protected val supportsPartitionManagement = false
 
   override def beforeAll(): Unit = {
-    if (isSparkV32OrGreater) {
+    if (isSparkV31OrGreater) {
       spark.conf.set(
         s"spark.sql.catalog.$catalogV2",
         "org.apache.iceberg.spark.SparkCatalog")
@@ -51,7 +51,7 @@ class IcebergCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite {
   }
 
   override def withFixture(test: NoArgTest): Outcome = {
-    assume(isSparkV32OrGreater)
+    assume(isSparkV31OrGreater)
     test()
   }
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
index 909c26d36..6b1cedf78 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
@@ -19,6 +19,8 @@ package org.apache.kyuubi.plugin.spark.authz.ranger
 // scalastyle:off
 import scala.util.Try
 
+import org.scalatest.Outcome
+
 import org.apache.kyuubi.Utils
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 
@@ -29,7 +31,7 @@ import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   override protected val catalogImpl: String = "hive"
   override protected val sqlExtensions: String =
-    if (isSparkV32OrGreater)
+    if (isSparkV31OrGreater)
       "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
     else ""
 
@@ -38,8 +40,13 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   val table1 = "table1"
   val outputTable1 = "outputTable1"
 
+  override def withFixture(test: NoArgTest): Outcome = {
+    assume(isSparkV31OrGreater)
+    test()
+  }
+
   override def beforeAll(): Unit = {
-    if (isSparkV32OrGreater) {
+    if (isSparkV31OrGreater) {
       spark.conf.set(
         s"spark.sql.catalog.$catalogV2",
         "org.apache.iceberg.spark.SparkCatalog")
@@ -74,8 +81,6 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #3515] MERGE INTO") {
-    assume(isSparkV32OrGreater)
-
     val mergeIntoSql =
       s"""
          |MERGE INTO $catalogV2.$namespace1.$outputTable1 AS target
@@ -115,8 +120,6 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #3515] UPDATE TABLE") {
-    assume(isSparkV32OrGreater)
-
     // UpdateTable
     val e1 = intercept[AccessControlException](
       doAs(
@@ -133,8 +136,6 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #3515] DELETE FROM TABLE") {
-    assume(isSparkV32OrGreater)
-
     // DeleteFromTable
     val e6 = intercept[AccessControlException](
       doAs("someone", sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=2")))
@@ -145,8 +146,6 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #3666] Support {OWNER} variable for queries run on CatalogV2") {
-    assume(isSparkV32OrGreater)
-
     val table = "owner_variable"
     val select = s"SELECT key FROM $catalogV2.$namespace1.$table"
 
@@ -224,11 +223,9 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   test("[KYUUBI #4255] DESCRIBE TABLE") {
-    assume(isSparkV32OrGreater)
     val e1 = intercept[AccessControlException](
       doAs("someone", sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1]"))
   }
-
 }