Posted to commits@kyuubi.apache.org by ya...@apache.org on 2023/01/10 07:26:51 UTC

[kyuubi] branch master updated: [KYUUBI #4134] [Authz] Code improvement in Authz Serde extractors and commands

This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 2fef96654 [KYUUBI #4134] [Authz] Code improvement in Authz Serde extractors and commands
2fef96654 is described below

commit 2fef966544d932b0650f473a451880400c4bd252
Author: liangbowen <li...@gf.com.cn>
AuthorDate: Tue Jan 10 15:26:40 2023 +0800

    [KYUUBI #4134] [Authz] Code improvement in Authz Serde extractors and commands
    
    ### _Why are the changes needed?_
    
    Code improvements in Authz's Serde:
    1. generalize the `loadExtractorsToMap` method for loading extractors via SPI (see the first sketch below)
    2. avoid magic string values for type names by adding implicit `actionTypeStr`/`functionTypeStr`/`tableTypeStr` conversions from `PrivilegeObjectActionType`/`FunctionType`/`TableType` to String (see the second sketch below)
    3. simplify the `ActionTypeDesc` constructor with default values
    4. add class hints in the comments of extractor classes
    5. print the spec count and file name in `JsonSpecFileGenerator.writeCommandSpecJson`
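
    A minimal sketch of item 1, condensed from the diff below. The helper body is the one added to the serde package object; the `Extractor` traits here are stand-in stubs for the real serde hierarchy:

    ```scala
    import java.util.ServiceLoader

    import scala.collection.JavaConverters._
    import scala.reflect.ClassTag

    object ExtractorLoadingSketch {
      // Stand-in stubs for the real serde traits
      trait Extractor { def key: String }
      trait ActionTypeExtractor extends Extractor

      // Shared SPI loader: ClassTag recovers the runtime class, so the
      // ServiceLoader boilerplate is written once for all extractor kinds.
      def loadExtractorsToMap[T <: Extractor](implicit ct: ClassTag[T]): Map[String, T] = {
        ServiceLoader.load(ct.runtimeClass).iterator().asScala
          .map { case e: Extractor => (e.key, e.asInstanceOf[T]) }
          .toMap
      }

      // Each companion object's registry collapses to a single call:
      val actionTypeExtractors: Map[String, ActionTypeExtractor] =
        loadExtractorsToMap[ActionTypeExtractor]
    }
    ```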
    
    No new feature is introduced in this PR.
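
    For items 2 and 3, a second sketch of how the implicit conversions and the new default values read at call sites. The implicit matches the one added to the gen package object; the `TableType` enumeration here is an illustrative stand-in:

    ```scala
    import scala.language.implicitConversions

    object TypeNameSketch {
      // Illustrative stand-in for the real TableType enumeration
      object TableType extends Enumeration {
        type TableType = Value
        val TEMP_VIEW, GLOBAL_TEMP_VIEW = Value
      }
      import TableType._

      // Enum values now convert to their names implicitly, so spec builders
      // no longer hard-code magic strings such as "TEMP_VIEW".
      implicit def tableTypeStr(t: TableType): String = t.toString

      val before: Seq[String] = Seq("TEMP_VIEW", "GLOBAL_TEMP_VIEW") // magic values
      val after: Seq[String] = Seq(TEMP_VIEW, GLOBAL_TEMP_VIEW) // via the implicit

      // Combined with ActionTypeDesc's new default values,
      //   ActionTypeDesc(null, null, Some("UPDATE"))
      // becomes
      //   ActionTypeDesc(actionType = Some(UPDATE))
    }
    ```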
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #4134 from bowenliang123/authz-serde-update.
    
    Closes #4134
    
    c84ee1f88 [liangbowen] print spec counts in `writeSpecJsonFile` method and move commandType to first param
    03801498e [liangbowen] introduce `queryQueryDesc` field in TableCommands
    78c3b3bee [liangbowen] fix import
    5ef7974ef [liangbowen] add implicit tableTypeStr for converting TableType to String
    fea35edfa [liangbowen] add implicit functionTypeStr for converting FunctionType to String
    8321518b7 [liangbowen] fix imports
    48d3fbf04 [liangbowen] fix imports
    51950c9a5 [liangbowen] generalize `loadExtractorsToMap` method
    a64322083 [liangbowen] add class hints for method comments of extractors
    eadba1f2b [liangbowen] simplify ActionTypeDesc constructor with default values
    8829c66d7 [liangbowen] add implicit `actionTypeStr` for converting PrivilegeObjectActionType to String
    
    Authored-by: liangbowen <li...@gf.com.cn>
    Signed-off-by: Kent Yao <ya...@apache.org>
---
 .../plugin/spark/authz/serde/Descriptor.scala      |  4 +-
 .../spark/authz/serde/actionTypeExtractors.scala   | 13 ++-----
 .../spark/authz/serde/catalogExtractors.scala      | 10 +----
 .../spark/authz/serde/columnExtractors.scala       | 13 ++-----
 .../spark/authz/serde/databaseExtractors.scala     | 13 ++-----
 .../spark/authz/serde/functionExtractors.scala     | 19 +++++-----
 .../spark/authz/serde/functionTypeExtractors.scala | 16 ++++----
 .../kyuubi/plugin/spark/authz/serde/package.scala  | 11 ++++++
 .../plugin/spark/authz/serde/queryExtractors.scala | 16 ++++----
 .../plugin/spark/authz/serde/tableExtractors.scala |  8 +---
 .../spark/authz/serde/tableTypeExtractors.scala    | 13 ++-----
 .../plugin/spark/authz/gen/DatabaseCommands.scala  |  2 +-
 .../plugin/spark/authz/gen/FunctionCommands.scala  |  7 ++--
 .../plugin/spark/authz/gen/IcebergCommands.scala   |  7 ++--
 .../spark/authz/gen/JsonSpecFileGenerator.scala    | 19 ++++++----
 .../kyuubi/plugin/spark/authz/gen/Scans.scala      |  2 +-
 .../plugin/spark/authz/gen/TableCommands.scala     | 44 +++++++++++-----------
 .../kyuubi/plugin/spark/authz/gen/package.scala    |  9 +++++
 18 files changed, 109 insertions(+), 117 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
index fd355e348..d8c866b88 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
@@ -261,8 +261,8 @@ case class TableDesc(
  * @param actionType the explicitly given action type which takes precedence over extracting
  */
 case class ActionTypeDesc(
-    fieldName: String,
-    fieldExtractor: String,
+    fieldName: String = null,
+    fieldExtractor: String = null,
     actionType: Option[String] = None) extends Descriptor {
   override def extract(v: AnyRef): PrivilegeObjectActionType = {
     actionType.map(PrivilegeObjectActionType.withName).getOrElse {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/actionTypeExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/actionTypeExtractors.scala
index 3b808b18c..0c51f6875 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/actionTypeExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/actionTypeExtractors.scala
@@ -17,10 +17,6 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.SaveMode.{Append, Overwrite}
 
 import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
@@ -29,14 +25,13 @@ trait ActionTypeExtractor extends (AnyRef => PrivilegeObjectActionType) with Ext
 
 object ActionTypeExtractor {
   val actionTypeExtractors: Map[String, ActionTypeExtractor] = {
-    ServiceLoader.load(classOf[ActionTypeExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[ActionTypeExtractor]
   }
 }
 
+/**
+ * Boolean
+ */
 class OverwriteOrInsertActionTypeExtractor extends ActionTypeExtractor {
   override def apply(v1: AnyRef): PrivilegeObjectActionType = {
     if (v1.asInstanceOf[Boolean]) INSERT_OVERWRITE else INSERT
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/catalogExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/catalogExtractors.scala
index a19147090..0b7d71223 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/catalogExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/catalogExtractors.scala
@@ -17,21 +17,13 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 trait CatalogExtractor extends (AnyRef => Option[String]) with Extractor
 
 object CatalogExtractor {
   val catalogExtractors: Map[String, CatalogExtractor] = {
-    ServiceLoader.load(classOf[CatalogExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[CatalogExtractor]
   }
 }
 
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/columnExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/columnExtractors.scala
index 23ef665d8..721b85688 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/columnExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/columnExtractors.scala
@@ -17,21 +17,13 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.types.StructField
 
 trait ColumnExtractor extends (AnyRef => Seq[String]) with Extractor
 
 object ColumnExtractor {
   val columnExtractors: Map[String, ColumnExtractor] = {
-    ServiceLoader.load(classOf[ColumnExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[ColumnExtractor]
   }
 }
 class StringColumnExtractor extends ColumnExtractor {
@@ -58,6 +50,9 @@ class StringSeqOptionColumnExtractor extends ColumnExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.types.StructField
+ */
 class StructFieldSeqColumnExtractor extends ColumnExtractor {
   override def apply(v1: AnyRef): Seq[String] = {
     v1.asInstanceOf[Seq[StructField]].map(_.name)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/databaseExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/databaseExtractors.scala
index adb04ea60..4e9270e78 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/databaseExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/databaseExtractors.scala
@@ -17,21 +17,13 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 trait DatabaseExtractor extends (AnyRef => Database) with Extractor
 
 object DatabaseExtractor {
   val dbExtractors: Map[String, DatabaseExtractor] = {
-    ServiceLoader.load(classOf[DatabaseExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[DatabaseExtractor]
   }
 }
 
@@ -83,6 +75,9 @@ class ResolvedNamespaceDatabaseExtractor extends DatabaseExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.analysis.ResolvedDbObjectName
+ */
 class ResolvedDBObjectNameDatabaseExtractor extends DatabaseExtractor {
   override def apply(v1: AnyRef): Database = {
     val catalogVal = invoke(v1, "catalog")
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
index 971203a59..894a6cb8f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
@@ -17,10 +17,6 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.expressions.ExpressionInfo
 
@@ -28,20 +24,22 @@ trait FunctionExtractor extends (AnyRef => Function) with Extractor
 
 object FunctionExtractor {
   val functionExtractors: Map[String, FunctionExtractor] = {
-    ServiceLoader.load(classOf[FunctionExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[FunctionExtractor]
   }
 }
 
+/**
+ * String
+ */
 class StringFunctionExtractor extends FunctionExtractor {
   override def apply(v1: AnyRef): Function = {
     Function(None, v1.asInstanceOf[String])
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.FunctionIdentifier
+ */
 class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
   override def apply(v1: AnyRef): Function = {
     val identifier = v1.asInstanceOf[FunctionIdentifier]
@@ -49,6 +47,9 @@ class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.expressions.ExpressionInfo
+ */
 class ExpressionInfoFunctionExtractor extends FunctionExtractor {
   override def apply(v1: AnyRef): Function = {
     val info = v1.asInstanceOf[ExpressionInfo]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
index 4509ed6de..4c5e9dc84 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
@@ -17,10 +17,6 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 
@@ -35,11 +31,7 @@ trait FunctionTypeExtractor extends ((AnyRef, SparkSession) => FunctionType) wit
 
 object FunctionTypeExtractor {
   val functionTypeExtractors: Map[String, FunctionTypeExtractor] = {
-    ServiceLoader.load(classOf[FunctionTypeExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[FunctionTypeExtractor]
   }
 }
 
@@ -56,6 +48,9 @@ class TempMarkerFunctionTypeExtractor extends FunctionTypeExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.expressions.ExpressionInfo
+ */
 class ExpressionInfoFunctionTypeExtractor extends FunctionTypeExtractor {
   override def apply(v1: AnyRef, spark: SparkSession): FunctionType = {
     val function = new ExpressionInfoFunctionExtractor().apply(v1)
@@ -64,6 +59,9 @@ class ExpressionInfoFunctionTypeExtractor extends FunctionTypeExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.FunctionIdentifier
+ */
 class FunctionIdentifierFunctionTypeExtractor extends FunctionTypeExtractor {
   override def apply(v1: AnyRef, spark: SparkSession): FunctionType = {
     val catalog = spark.sessionState.catalog
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
index bfc719f22..a52a558a0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
@@ -17,6 +17,11 @@
 
 package org.apache.kyuubi.plugin.spark.authz
 
+import java.util.ServiceLoader
+
+import scala.collection.JavaConverters._
+import scala.reflect.ClassTag
+
 import com.fasterxml.jackson.core.`type`.TypeReference
 import com.fasterxml.jackson.databind.json.JsonMapper
 import com.fasterxml.jackson.module.scala.DefaultScalaModule
@@ -28,6 +33,12 @@ package object serde {
 
   final val mapper = JsonMapper.builder().addModule(DefaultScalaModule).build()
 
+  def loadExtractorsToMap[T <: Extractor](implicit ct: ClassTag[T]): Map[String, T] = {
+    ServiceLoader.load(ct.runtimeClass).iterator().asScala
+      .map { case e: Extractor => (e.key, e.asInstanceOf[T]) }
+      .toMap
+  }
+
   final lazy val DB_COMMAND_SPECS: Map[String, DatabaseCommandSpec] = {
     val is = getClass.getClassLoader.getResourceAsStream("database_command_spec.json")
     mapper.readValue(is, new TypeReference[Array[DatabaseCommandSpec]] {})
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala
index d8e26f09a..f6fc19ac2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/queryExtractors.scala
@@ -17,30 +17,28 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 
 trait QueryExtractor extends (AnyRef => Option[LogicalPlan]) with Extractor
 
 object QueryExtractor {
   val queryExtractors: Map[String, QueryExtractor] = {
-    ServiceLoader.load(classOf[QueryExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[QueryExtractor]
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+ */
 class LogicalPlanQueryExtractor extends QueryExtractor {
   override def apply(v1: AnyRef): Option[LogicalPlan] = {
     Some(v1.asInstanceOf[LogicalPlan])
   }
 }
 
+/**
+ * Option[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]
+ */
 class LogicalPlanOptionQueryExtractor extends QueryExtractor {
   override def apply(v1: AnyRef): Option[LogicalPlan] = {
     v1.asInstanceOf[Option[LogicalPlan]]
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
index 0aef40ced..c848381d4 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableExtractors.scala
@@ -17,7 +17,7 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.{Map => JMap, ServiceLoader}
+import java.util.{Map => JMap}
 
 import scala.collection.JavaConverters._
 
@@ -36,11 +36,7 @@ trait TableExtractor extends ((SparkSession, AnyRef) => Option[Table]) with Extr
 
 object TableExtractor {
   val tableExtractors: Map[String, TableExtractor] = {
-    ServiceLoader.load(classOf[TableExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[TableExtractor]
   }
 
   /**
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableTypeExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableTypeExtractors.scala
index 62ad46944..b6f93b8d7 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableTypeExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/tableTypeExtractors.scala
@@ -17,10 +17,6 @@
 
 package org.apache.kyuubi.plugin.spark.authz.serde
 
-import java.util.ServiceLoader
-
-import scala.collection.JavaConverters._
-
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.TableIdentifier
 
@@ -35,11 +31,7 @@ trait TableTypeExtractor extends ((AnyRef, SparkSession) => TableType) with Extr
 
 object TableTypeExtractor {
   val tableTypeExtractors: Map[String, TableTypeExtractor] = {
-    ServiceLoader.load(classOf[TableTypeExtractor])
-      .iterator()
-      .asScala
-      .map(e => (e.key, e))
-      .toMap
+    loadExtractorsToMap[TableTypeExtractor]
   }
 }
 
@@ -56,6 +48,9 @@ class ViewTypeTableTypeExtractor extends TableTypeExtractor {
   }
 }
 
+/**
+ * org.apache.spark.sql.catalyst.TableIdentifier
+ */
 class TableIdentifierTableTypeExtractor extends TableTypeExtractor {
   override def apply(v1: AnyRef, spark: SparkSession): TableType = {
     val catalog = spark.sessionState.catalog
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
index 09ff916a0..e947579e9 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
@@ -132,7 +132,7 @@ object DatabaseCommands {
     DatabaseCommandSpec(cmd, Seq(databaseDesc), DESCDATABASE)
   }
 
-  val data = Array(
+  val data: Array[DatabaseCommandSpec] = Array(
     AlterDatabaseProperties,
     AlterDatabaseProperties.copy(
       classname = "org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
index 590ad1786..46c7f0efa 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
@@ -19,6 +19,7 @@ package org.apache.kyuubi.plugin.spark.authz.gen
 
 import org.apache.kyuubi.plugin.spark.authz.OperationType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
+import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType.{SYSTEM, TEMP}
 
 object FunctionCommands {
 
@@ -27,7 +28,7 @@ object FunctionCommands {
     val functionTypeDesc = FunctionTypeDesc(
       "isTemp",
       classOf[TempMarkerFunctionTypeExtractor],
-      Seq("TEMP"))
+      Seq(TEMP))
     val databaseDesc =
       DatabaseDesc("databaseName", classOf[StringOptionDatabaseExtractor])
     val functionDesc = FunctionDesc(
@@ -40,7 +41,7 @@ object FunctionCommands {
 
   val DescribeFunction = {
     val cmd = "org.apache.spark.sql.execution.command.DescribeFunctionCommand"
-    val skips = Seq("TEMP", "SYSTEM")
+    val skips: Seq[String] = Seq(TEMP, SYSTEM)
     val functionTypeDesc1 =
       FunctionTypeDesc("info", classOf[ExpressionInfoFunctionTypeExtractor], skips)
     val functionDesc1 = FunctionDesc(
@@ -78,7 +79,7 @@ object FunctionCommands {
     FunctionCommandSpec(cmd, Seq(functionDesc), RELOADFUNCTION)
   }
 
-  val data = Array(
+  val data: Array[FunctionCommandSpec] = Array(
     CreateFunction,
     DropFunction,
     DescribeFunction,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
index f316daa90..208e73c51 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
@@ -17,13 +17,14 @@
 
 package org.apache.kyuubi.plugin.spark.authz.gen
 
+import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 
 object IcebergCommands {
 
   val DeleteFromIcebergTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.DeleteFromIcebergTable"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
     val tableDesc =
       TableDesc(
         "table",
@@ -39,7 +40,7 @@ object IcebergCommands {
 
   val MergeIntoIcebergTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.MergeIntoIcebergTable"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
     val tableDesc = TableDesc(
       "targetTable",
       classOf[DataSourceV2RelationTableExtractor],
@@ -48,7 +49,7 @@ object IcebergCommands {
     TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
   }
 
-  val data = Array(
+  val data: Array[TableCommandSpec] = Array(
     DeleteFromIcebergTable,
     UpdateIcebergTable,
     MergeIntoIcebergTable,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
index 65fa9212c..7c7ed138b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
@@ -31,21 +31,24 @@ import org.apache.kyuubi.plugin.spark.authz.serde.{mapper, CommandSpec}
 object JsonSpecFileGenerator {
 
   def main(args: Array[String]): Unit = {
-    write(DatabaseCommands.data, "database")
-    write(TableCommands.data ++ IcebergCommands.data, "table")
-    write(FunctionCommands.data, "function")
-    write(Scans.data, "scan")
+    writeCommandSpecJson("database", DatabaseCommands.data)
+    writeCommandSpecJson("table", TableCommands.data ++ IcebergCommands.data)
+    writeCommandSpecJson("function", FunctionCommands.data)
+    writeCommandSpecJson("scan", Scans.data)
   }
 
-  def write[T <: CommandSpec](data: Array[T], filename: String): Unit = {
+  def writeCommandSpecJson[T <: CommandSpec](commandType: String, specArr: Array[T]): Unit = {
     val pluginHome = getClass.getProtectionDomain.getCodeSource.getLocation.getPath
       .split("target").head
+    val filename = s"${commandType}_command_spec.json"
     val writer = {
-      val p =
-        Paths.get(pluginHome, "src", "main", "resources", s"${filename}_command_spec.json")
+      val p = Paths.get(pluginHome, "src", "main", "resources", filename)
       Files.newBufferedWriter(p, StandardCharsets.UTF_8)
     }
-    mapper.writerWithDefaultPrettyPrinter().writeValue(writer, data.sortBy(_.classname))
+    // scalastyle:off println
+    println(s"writing ${specArr.length} specs to $filename")
+    // scalastyle:on println
+    mapper.writerWithDefaultPrettyPrinter().writeValue(writer, specArr.sortBy(_.classname))
     writer.close()
   }
 }
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
index 56a3a5819..7bd8260bb 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
@@ -57,7 +57,7 @@ object Scans {
     ScanSpec(r, Seq(tableDesc))
   }
 
-  val data = Array(
+  val data: Array[ScanSpec] = Array(
     HiveTableRelation,
     LogicalRelation,
     DataSourceV2Relation,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 2a890c41a..ef981515a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -18,7 +18,9 @@
 package org.apache.kyuubi.plugin.spark.authz.gen
 
 import org.apache.kyuubi.plugin.spark.authz.OperationType._
+import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
+import org.apache.kyuubi.plugin.spark.authz.serde.TableType._
 
 object TableCommands {
   // table extractors
@@ -28,9 +30,9 @@ object TableCommands {
   val resolvedTableDesc = TableDesc("child", classOf[ResolvedTableTableExtractor])
   val resolvedDbObjectNameDesc =
     TableDesc("child", classOf[ResolvedDbObjectNameTableExtractor])
-
   val overwriteActionTypeDesc =
     ActionTypeDesc("overwrite", classOf[OverwriteOrInsertActionTypeExtractor])
+  val queryQueryDesc = QueryDesc("query")
 
   val AlterTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AlterTable"
@@ -100,13 +102,13 @@ object TableCommands {
 
   val AlterTableRename = {
     val cmd = "org.apache.spark.sql.execution.command.AlterTableRenameCommand"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("DELETE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(DELETE))
 
     val oldTableTableTypeDesc =
       TableTypeDesc(
         "oldName",
         classOf[TableIdentifierTableTypeExtractor],
-        Seq("TEMP_VIEW"))
+        Seq(TEMP_VIEW))
     val oldTableD = TableDesc(
       "oldName",
       tite,
@@ -172,7 +174,7 @@ object TableCommands {
       "org.apache.spark.sql.execution.command.AlterViewAsCommand",
       Seq(TableDesc("name", tite, tableTypeDesc = Some(tableTypeDesc))),
       ALTERVIEW_AS,
-      Seq(QueryDesc("query")))
+      Seq(queryQueryDesc))
   }
 
   val AnalyzeColumn = {
@@ -229,7 +231,7 @@ object TableCommands {
       cmd,
       Seq(tableDesc, resolvedDbObjectNameDesc.copy(fieldName = "left")),
       CREATETABLE_AS_SELECT,
-      Seq(QueryDesc("query")))
+      Seq(queryQueryDesc))
   }
 
   val CommentOnTable = {
@@ -239,24 +241,24 @@ object TableCommands {
 
   val AppendDataV2 = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AppendData"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("INSERT"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(INSERT))
     val tableDesc =
       TableDesc(
         "table",
         classOf[DataSourceV2RelationTableExtractor],
         actionTypeDesc = Some(actionTypeDesc))
-    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
+    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc))
   }
 
   val UpdateTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.UpdateTable"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
     val tableDesc =
       TableDesc(
         "table",
         classOf[DataSourceV2RelationTableExtractor],
         actionTypeDesc = Some(actionTypeDesc))
-    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
+    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc))
   }
 
   val DeleteFromTable = {
@@ -266,13 +268,13 @@ object TableCommands {
 
   val OverwriteByExpression = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.OverwriteByExpression"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("INSERT_OVERWRITE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(INSERT_OVERWRITE))
     val tableDesc =
       TableDesc(
         "table",
         classOf[DataSourceV2RelationTableExtractor],
         actionTypeDesc = Some(actionTypeDesc))
-    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
+    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc))
   }
 
   val OverwritePartitionsDynamic = {
@@ -329,7 +331,7 @@ object TableCommands {
     val tableTypeDesc = TableTypeDesc(
       "viewType",
       classOf[ViewTypeTableTypeExtractor],
-      Seq("TEMP_VIEW", "GLOBAL_TEMP_VIEW"))
+      Seq(TEMP_VIEW, GLOBAL_TEMP_VIEW))
     val tableDesc = TableDesc(
       "name",
       classOf[TableIdentifierTableExtractor],
@@ -359,7 +361,7 @@ object TableCommands {
     CreateDataSourceTable.copy(
       classname = cmd,
       opType = CREATETABLE_AS_SELECT,
-      queryDescs = Seq(QueryDesc("query")))
+      queryDescs = Seq(queryQueryDesc))
   }
 
   val CreateHiveTableAsSelect = {
@@ -367,7 +369,7 @@ object TableCommands {
     val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
     val tableDesc =
       TableDesc("tableDesc", classOf[CatalogTableTableExtractor], Some(columnDesc))
-    val queryDesc = QueryDesc("query")
+    val queryDesc = queryQueryDesc
     TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE_AS_SELECT", queryDescs = Seq(queryDesc))
   }
 
@@ -414,7 +416,7 @@ object TableCommands {
       TableTypeDesc(
         "tableName",
         classOf[TableIdentifierTableTypeExtractor],
-        Seq("TEMP_VIEW"))
+        Seq(TEMP_VIEW))
     TableCommandSpec(
       cmd,
       Seq(tableNameDesc.copy(tableTypeDesc = Some(tableTypeDesc))),
@@ -429,7 +431,7 @@ object TableCommands {
 
   val MergeIntoTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.MergeIntoTable"
-    val actionTypeDesc = ActionTypeDesc(null, null, Some("UPDATE"))
+    val actionTypeDesc = ActionTypeDesc(actionType = Some(UPDATE))
     val tableDesc = TableDesc(
       "targetTable",
       classOf[DataSourceV2RelationTableExtractor],
@@ -496,7 +498,7 @@ object TableCommands {
       "logicalRelation",
       classOf[LogicalRelationTableExtractor],
       actionTypeDesc = Some(actionTypeDesc))
-    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(QueryDesc("query")))
+    TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryQueryDesc))
   }
 
   val InsertIntoHiveTable = {
@@ -508,13 +510,13 @@ object TableCommands {
       classOf[CatalogTableTableExtractor],
       Some(columnDesc),
       Some(actionTypeDesc))
-    val queryDesc = QueryDesc("query")
+    val queryDesc = queryQueryDesc
     TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
   }
 
   val InsertIntoDataSourceDir = {
     val cmd = "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand"
-    val queryDesc = QueryDesc("query")
+    val queryDesc = queryQueryDesc
     TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc))
   }
 
@@ -527,7 +529,7 @@ object TableCommands {
       classOf[CatalogTableOptionTableExtractor],
       Some(columnDesc),
       actionTypeDesc = Some(actionTypeDesc))
-    val queryDesc = QueryDesc("query")
+    val queryDesc = queryQueryDesc
     TableCommandSpec(cmd, Seq(tableDesc), queryDescs = Seq(queryDesc))
   }
 
@@ -557,7 +559,7 @@ object TableCommands {
     TableCommandSpec(cmd, Seq(tableIdentDesc.copy(isInput = true)))
   }
 
-  val data = Array(
+  val data: Array[TableCommandSpec] = Array(
     AddPartitions,
     DropPartitions,
     RenamePartitions,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala
index 7bb449469..88ff0b45b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/package.scala
@@ -20,9 +20,18 @@ package org.apache.kyuubi.plugin.spark.authz
 import scala.language.implicitConversions
 
 import org.apache.kyuubi.plugin.spark.authz.OperationType.OperationType
+import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType.PrivilegeObjectActionType
+import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType.FunctionType
+import org.apache.kyuubi.plugin.spark.authz.serde.TableType.TableType
 
 package object gen {
   implicit def classSimpleName(clz: Class[_]): String = clz.getSimpleName
 
+  implicit def actionTypeStr(t: PrivilegeObjectActionType): String = t.toString
+
+  implicit def functionTypeStr(t: FunctionType): String = t.toString
+
   implicit def operationTypeStr(t: OperationType): String = t.toString
+
+  implicit def tableTypeStr(t: TableType): String = t.toString
 }